From c5481ead055c44b75d79cdc695f52b63f611e869 Mon Sep 17 00:00:00 2001 From: XinYi Song <2037158277@qq.com> Date: Wed, 1 Dec 2021 14:27:49 +0800 Subject: [PATCH] =?UTF-8?q?1=E3=80=81=E6=90=AD=E5=BB=BAflask=E6=A1=86?= =?UTF-8?q?=E6=9E=B6=E3=80=82=202=E3=80=81=E6=95=B4=E5=90=88JPSS=EF=BC=8C?= =?UTF-8?q?=E8=91=B5=E8=8A=B18=EF=BC=8CGF3=EF=BC=8C=E5=93=A8=E5=85=B51?= =?UTF-8?q?=EF=BC=8C=E5=93=A8=E5=85=B52=EF=BC=8C=E5=93=A8=E5=85=B53?= =?UTF-8?q?=EF=BC=8C=E8=B5=84=E6=BA=902=E5=8F=B7=EF=BC=8C=E7=8E=AF?= =?UTF-8?q?=E5=A2=831=E5=8F=B7=EF=BC=8CSNPP=E7=AD=89=E9=81=A5=E6=84=9F?= =?UTF-8?q?=E6=95=B0=E6=8D=AE=E8=A7=A3=E6=9E=90=E7=AE=97=E6=B3=95=E3=80=82?= =?UTF-8?q?=203=E3=80=81flask=E4=B8=AD=E6=B7=BB=E5=8A=A0=E6=89=AB=E6=8F=8F?= =?UTF-8?q?=E5=90=84=E4=B8=AA=E5=8D=AB=E6=98=9F=E6=89=AB=E6=8F=8F=E4=BB=BB?= =?UTF-8?q?=E5=8A=A1=EF=BC=8C=E5=AE=9A=E6=97=B6=E6=89=AB=E6=8F=8F=EF=BC=8C?= =?UTF-8?q?=E6=95=B0=E6=8D=AE=E5=85=A5=E5=BA=93?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .idea/.gitignore | 8 + .idea/dms.iml | 19 + .idea/inspectionProfiles/Project_Default.xml | 179 ++ .../inspectionProfiles/profiles_settings.xml | 6 + .idea/misc.xml | 4 + .idea/modules.xml | 8 + .idea/vcs.xml | 6 + __pycache__/app.cpython-39.pyc | Bin 0 -> 477 bytes app.py | 16 + application/__init__.py | 52 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 956 bytes application/settings/__init__.py | 44 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 807 bytes .../settings/__pycache__/dev.cpython-39.pyc | Bin 0 -> 421 bytes .../settings/__pycache__/prop.cpython-39.pyc | Bin 0 -> 401 bytes application/settings/dev.py | 9 + application/settings/prop.py | 6 + common/__init__.py | 0 common/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 139 bytes common/config/__init__.py | 5 + .../__pycache__/__init__.cpython-39.pyc | Bin 0 -> 212 bytes .../config/__pycache__/factory.cpython-39.pyc | Bin 0 -> 791 bytes common/config/factory.py | 85 + common/tools/__init__.py | 
5 + .../tools/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 212 bytes common/tools/__pycache__/dms.cpython-39.pyc | Bin 0 -> 1404 bytes common/tools/dms.py | 40 + scan_data/GetMetaInfo.py | 1554 +++++++++++++++++ scan_data/__init__.py | 5 + .../__pycache__/GetMetaInfo.cpython-39.pyc | Bin 0 -> 30380 bytes scan_data/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 209 bytes scan_data/__pycache__/example.cpython-39.pyc | Bin 0 -> 3887 bytes scan_data/example.py | 165 ++ util/__init__.py | 0 util/__pycache__/__init__.cpython-39.pyc | Bin 0 -> 137 bytes .../file_store_path.cpython-39.pyc | Bin 0 -> 1620 bytes .../http_file_upload.cpython-39.pyc | Bin 0 -> 1953 bytes util/__pycache__/http_util.cpython-39.pyc | Bin 0 -> 3421 bytes .../remote_sensing_util.cpython-39.pyc | Bin 0 -> 2803 bytes .../__pycache__/scan_file_util.cpython-39.pyc | Bin 0 -> 15999 bytes util/__pycache__/xml_util.cpython-39.pyc | Bin 0 -> 1406 bytes util/copy_util.py | 42 + util/file_store_path.py | 84 + util/http_file_upload.py | 86 + util/http_util.py | 158 ++ util/json_util.py | 44 + util/md5_util.py | 16 + util/remote_sensing_util.py | 91 + util/scan_file_util.py | 1195 +++++++++++++ util/snow_ari.py | 103 ++ util/xml_util.py | 50 + util/zxby.py | 78 + 52 files changed, 4163 insertions(+) create mode 100644 .idea/.gitignore create mode 100644 .idea/dms.iml create mode 100644 .idea/inspectionProfiles/Project_Default.xml create mode 100644 .idea/inspectionProfiles/profiles_settings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/vcs.xml create mode 100644 __pycache__/app.cpython-39.pyc create mode 100644 app.py create mode 100644 application/__init__.py create mode 100644 application/__pycache__/__init__.cpython-39.pyc create mode 100644 application/settings/__init__.py create mode 100644 application/settings/__pycache__/__init__.cpython-39.pyc create mode 100644 application/settings/__pycache__/dev.cpython-39.pyc create mode 100644 
application/settings/__pycache__/prop.cpython-39.pyc create mode 100644 application/settings/dev.py create mode 100644 application/settings/prop.py create mode 100644 common/__init__.py create mode 100644 common/__pycache__/__init__.cpython-39.pyc create mode 100644 common/config/__init__.py create mode 100644 common/config/__pycache__/__init__.cpython-39.pyc create mode 100644 common/config/__pycache__/factory.cpython-39.pyc create mode 100644 common/config/factory.py create mode 100644 common/tools/__init__.py create mode 100644 common/tools/__pycache__/__init__.cpython-39.pyc create mode 100644 common/tools/__pycache__/dms.cpython-39.pyc create mode 100644 common/tools/dms.py create mode 100644 scan_data/GetMetaInfo.py create mode 100644 scan_data/__init__.py create mode 100644 scan_data/__pycache__/GetMetaInfo.cpython-39.pyc create mode 100644 scan_data/__pycache__/__init__.cpython-39.pyc create mode 100644 scan_data/__pycache__/example.cpython-39.pyc create mode 100644 scan_data/example.py create mode 100644 util/__init__.py create mode 100644 util/__pycache__/__init__.cpython-39.pyc create mode 100644 util/__pycache__/file_store_path.cpython-39.pyc create mode 100644 util/__pycache__/http_file_upload.cpython-39.pyc create mode 100644 util/__pycache__/http_util.cpython-39.pyc create mode 100644 util/__pycache__/remote_sensing_util.cpython-39.pyc create mode 100644 util/__pycache__/scan_file_util.cpython-39.pyc create mode 100644 util/__pycache__/xml_util.cpython-39.pyc create mode 100644 util/copy_util.py create mode 100644 util/file_store_path.py create mode 100644 util/http_file_upload.py create mode 100644 util/http_util.py create mode 100644 util/json_util.py create mode 100644 util/md5_util.py create mode 100644 util/remote_sensing_util.py create mode 100644 util/scan_file_util.py create mode 100644 util/snow_ari.py create mode 100644 util/xml_util.py create mode 100644 util/zxby.py diff --git a/.idea/.gitignore b/.idea/.gitignore new file mode 100644 
index 0000000..73f69e0 --- /dev/null +++ b/.idea/.gitignore @@ -0,0 +1,8 @@ +# Default ignored files +/shelf/ +/workspace.xml +# Datasource local storage ignored files +/dataSources/ +/dataSources.local.xml +# Editor-based HTTP Client requests +/httpRequests/ diff --git a/.idea/dms.iml b/.idea/dms.iml new file mode 100644 index 0000000..3f70c5c --- /dev/null +++ b/.idea/dms.iml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml new file mode 100644 index 0000000..d299771 --- /dev/null +++ b/.idea/inspectionProfiles/Project_Default.xml @@ -0,0 +1,179 @@ + + + + \ No newline at end of file diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml new file mode 100644 index 0000000..105ce2d --- /dev/null +++ b/.idea/inspectionProfiles/profiles_settings.xml @@ -0,0 +1,6 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 0000000..a98794f --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 0000000..e95a9e2 --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 0000000..94a25f7 --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/__pycache__/app.cpython-39.pyc b/__pycache__/app.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..06757f94139292dce0022f1f99f76520b6429a8d GIT binary patch literal 477 zcmYjN%SyvQ6rD$srb!BJ+$)ICP2G!#popt(6f7YShRno<&Ld0`C2sXYEV%KPvUTMz zbmg52wioW<+_|rFhfO9C0$ZGZvKS-u?Sg%y5L|&b3jmHdmPp|gW5>adPndciZKsY#6CbKS|jKk-=k^t1JltI5B{xW%?e0Z z>8j0HT?3B_D_ARBG*B*{H~6e@_4@|e(KUGE0dNd6$^13B6H=O^M`I;F{z7&HxgBgy 
zZqBo^k|KLCc9qqAyEMA*m{u&~s>xv8tnP<+>4xM`Bf0ylh5#U9$ACka=ZaN2&*#%P z^-Cw-y8IABx2y`*Rz_R*iRocslrnl&7+qFRvyv69vAuPA2kK7qt?63vKUXs)@_eAS h(>u+7EZnZXP=T)NYn1Av4iI3XbpXow>(#J@t9ZyEpq literal 0 HcmV?d00001 diff --git a/app.py b/app.py new file mode 100644 index 0000000..87a9632 --- /dev/null +++ b/app.py @@ -0,0 +1,16 @@ +from flask import Flask + +from application import init_app +from common.config.factory import create_app + +app = init_app('dev') +app = create_app() + + +@app.route('/') +def hello_world(): + return 'Hello World!' + + +if __name__ == '__main__': + app.run() diff --git a/application/__init__.py b/application/__init__.py new file mode 100644 index 0000000..0742144 --- /dev/null +++ b/application/__init__.py @@ -0,0 +1,52 @@ +# 项目初始化文件 +import logging +from logging.handlers import RotatingFileHandler +from flask import Flask +from redis import StrictRedis +from flask_wtf.csrf import CSRFProtect +from flask_session import Session +from flask_sqlalchemy import SQLAlchemy + +from application.settings.dev import DevelopmentConfig +from application.settings.prop import ProductionConfig +from common.config.factory import create_app + +config = { + 'dev': DevelopmentConfig, + 'prop': ProductionConfig, + +} + + +def init_app(config_name): + """ + 项目初始化 + :param config_name: + :return: + """ + # 主应用的根目录 + app = Flask(__name__) + + # 设置配置类 + Config = config[config_name] + + # 加载配置 + app.config.from_object(Config) + + # redis的连接初始化 + # global redis_store + # redis_store = StrictRedis(host=Config.CACHE_REDIS_HOST, port=Config.CACHE_REDIS_PORT, db=0) + + # 开启CSRF防范 + CSRFProtect(app) + + # 开启session + Session(app) + + # 增加数据库连接 + # db.init_app(app) + # + # # 启用日志 + # setup_log(Config) + + return app diff --git a/application/__pycache__/__init__.cpython-39.pyc b/application/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f77d46850a5fd309661b320c2a58ac076bd783a7 GIT binary patch literal 956 
zcmZuwOK;RL5Vn)equB@Bmqa=BgtV843zrI2pjH(JgrIw~R*FpGY*XwwINnw(ZBKCE zb>;#{R6;^rxbs(XMHT8_;D8uA73_hDJpN|JW6jLx(0)HcAmPo=^i>C;AI4Y~7mT~G z>erw$lwppD*&U9BLmX_d%U#hS9g92M6F%`R_IMyd5?bu@6%moh;(&KWkMt}Kd0!03 zz~U8tO$^D<;)t(`HL`{g8?a&4{os)Gte5p)AhMC+F&Ygnp&s;bgq!t4si`i?=|RES zeOhLmDR6rSoYqePbjMm1sXk;`Q5$n_e0cCkNzGCXW-w-TUC0t(G=BWx9#4;$n1i*t z&rTSZm0+dblVx5^!8wG)Y?f+B`0I;O#i(Wpt*VjJxEVWXI+c=DicN)AV#a9J9<2Hn zD2+6Jj?b`m7HA8dISYL3d~z4gImSqL77&BoYxjj|7|(VgE!qaTJpcal{kMykA1}VV zxp@1|`gSU+sMt6t29W8^DSL=z_VM3OXqo6vM~B}F2i9K%$U2WONI zYUGmc(v6J^JvM&Z`B$XY;K;Ar`#X~yCOglAK>mymtXS5V_l8Jf(8N5h=A6ZV4 zq$mrWBylxwLJJ|f376D8P$&r9(Crx=stvG5n@zywbPCjx;T4FFT7qlkz=_7o4av4C z;wi(hf=7(V!1|L@oyTdda@!HMZJFb~F&#R7sS{0h` z)_=o>bhTp2+DxSovW(l=#W_v2RC8tiG&L~Fv}eb+q}EMlct^kk<80ToSKj~?;@-c{ MvDkI6^V<*p0BisMJOBUy literal 0 HcmV?d00001 diff --git a/application/settings/__init__.py b/application/settings/__init__.py new file mode 100644 index 0000000..78ef342 --- /dev/null +++ b/application/settings/__init__.py @@ -0,0 +1,44 @@ +from redis import StrictRedis + + +# 全局通用配置类 +class Config(object): + """项目配置核心类""" + # 调试模式 + DEBUG = True + + # todo 配置日志 + LOG_LEVEL = "DEBUG" + + # mysql数据库配置信息(指定字符集?charset=utf8) + # 数据库连接格式 + 'SQLALCHEMY_DATABASE_URI ="数据库类型://用户名:密码@ip:port:库名?指定字符集编码"' + # SQLALCHEMY_DATABASE_URI = "postgresql://postgres:123456@192.168.2.139:5432/students?charset=utf8" + # # 动态追踪修改设置,如未设置只会提示警告 + # SQLALCHEMY_TRACK_MODIFICATIONS = False + # # 查询时会显示原始SQL语句 + # SQLALCHEMY_ECHO = False + # + # # 配置redis + # # 项目上线以后,这个地址就会被替换成真实IP地址,mysql也是 + # CACHE_TYPE = 'redis' + # CACHE_REDIS_HOST = 'localhost' + # CACHE_REDIS_PORT = 6379 + # CACHE_REDIS_DB = '8' + # CACHE_REDIS_PASSWORD = 'sdust2020' + # + # # 设置session 秘钥 + # # 可以通过 base64.b64encode(os.urandom(48)) 来生成一个指定长度的随机字符串 + # SECRET_KEY = "CF3tEA1J3hRyIOw3PWE3ZE9+hLOcUDq6acX/mABsEMTXNjRDm5YldRLIXazQviwP" + # + # # flask_session的配置信息 + # SESSION_TYPE = 'redis' # 指定session保存到redis中 + # SESSION_USE_SIGNER = True # 让 cookie 中的 session_id 被加密签名处理 
+ # SESSION_REDIS = StrictRedis(host=CACHE_REDIS_HOST, port=CACHE_REDIS_PORT) # 使用 redis 的实例 + # PERMANENT_SESSION_LIFETIME = 24 * 60 * 60 # session 的有效期,单位是秒 + XML_PATH = 'C:/Users/HP/Desktop/Number tube/test' # xml存储路径 + THUMBNAIL_PATH = 'C:/Users/HP/Desktop/Number tube/test' # 拇指图存储路径 + DFS_UPLOAD_URL = 'http://192.168.2.9:4096/group1/upload' # 文件服务器请求路径 + RESING_DATA_URL = 'http://192.168.2.9:8820/api/remote-sensing-data/add' # 入库遥感数据请求地址 + DATA_TASK_URL = 'http://192.168.2.9:8820/api/data-storage-task-record/add' # 添加遥感数据归档任务请求地址 + DATA_END_TASK_URL = 'http://192.168.2.9:8820/api/data-storage-task-record/end' # 结束遥感数据归档任务请求地址 diff --git a/application/settings/__pycache__/__init__.cpython-39.pyc b/application/settings/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b62497d5ca63981f5704b5b286c925875fdec549 GIT binary patch literal 807 zcmbtSJ8u&~5Z?2hpNR<|(NUy;!i9Z#pvbmR6enk#2*-*1K&jHip2s_Lf@Q|Odg?cF}N;< z0$WPOvNC``n;=iKH4U{8q(Fa(K&PoQ-PXbAp;CILSg52$3q8r*k?c}%#VC$l7fpdJ zrD8=Hifj##odQkuNZAJHb{Y&jqcjU?I|ms%4_UhaIeP)}YDOW->@fex9fx;R`Rv`N z%eNP^SFbO>T>Loy`2FkI<%iGxC~Y;j2d(M7ow_*?lnZnB(6p#Hkt{Gfp*N&_S%yPu zN-E^^{;`xn-85HgYn9cFYGtibt3Oz&ZJ1-u!eG@5g9#%rUH=QMT3uT)N#L5CdQ8%? zpuTYZaT$mtCIOgM|5X&T%0eet?i!rbf#ytLGJKsKN;db*$vmNE0?ziz_qrV&7=yo!Ikq14@Zhk0 z(6I2J+m4pH&E8(8g{?-vkpzl~*KhP5C-7DRn;i?!L+=q^QsWZybJMCjNA83=FBm^@ zf>U|S{L_&59&x}E4he#ZJ0j9$z9XoV>S2Y0vFp1M<4SPKb=4`YRNPi3rx__jH|`qR s{3Q8K$%xWC-b0>JtSG~V&*(@dxj$FA@@~e2+{3H)+Kks!#5Ie?FW5}riU0rr literal 0 HcmV?d00001 diff --git a/application/settings/__pycache__/dev.cpython-39.pyc b/application/settings/__pycache__/dev.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..46521300b20ed827389e1f3576136f85f35976a0 GIT binary patch literal 421 zcmYe~<>g`kf@xFcByI%Kk3k${zyjnr0CBMbkVs)jVa#F3WsG8EWN>FlVM<|cVMt+4 zWo%}SVoG5NX3%7P$p}=Y$#{#+IX^EgGd&r|LjfRZ2OusM0TQVUQH&`JK&^~X%qdJ! 
zEGf)DBUo+;x}=t+=HwUTrskDEj44$Ds^(Gvf~R{Lp7u|CwqoJaJ^fF1bU&Zf^0Kw} z`QCM4$q+wH)>|C$@p*~4sqyi*xZ>k;^HWN5Ky04)_`=e}9HT!T zpdcqRIk6-&KQE>@wWK67FTFS>CACbipt6V=Xb33eia`ViBMS>y#7~p?7DEve$O4c8 YgjEn0C?alg*g)KD2MVcTkXjyQ0D|0WcmMzZ literal 0 HcmV?d00001 diff --git a/application/settings/__pycache__/prop.cpython-39.pyc b/application/settings/__pycache__/prop.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6c7e88a3407d9ba19d24909468be6dd74174a143 GIT binary patch literal 401 zcmYe~<>g`kg42`dBo+bb#~={9pjO5x<`kwV zmK0{75iGX^0*dleN|Q@6^Yb94lu7^0?&q^wUbgl=-@DGu zPm}c)M|^xwllc}y5fjJ;kOG8V5EdxVZ*kZ_oN5OOm12-u G9%cZtVrPi} literal 0 HcmV?d00001 diff --git a/application/settings/dev.py b/application/settings/dev.py new file mode 100644 index 0000000..e2d014e --- /dev/null +++ b/application/settings/dev.py @@ -0,0 +1,9 @@ +from . import Config + + +class DevelopmentConfig(Config): + """ + 开发模式下的配置 + """ + # 查询时会显示原始SQL语句 + SQLALCHEMY_ECHO = True diff --git a/application/settings/prop.py b/application/settings/prop.py new file mode 100644 index 0000000..52b2c5c --- /dev/null +++ b/application/settings/prop.py @@ -0,0 +1,6 @@ +from . 
import Config + + +class ProductionConfig(Config): + """生产模式下的配置""" + DEBUG = False diff --git a/common/__init__.py b/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/common/__pycache__/__init__.cpython-39.pyc b/common/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5bf55a609baed2d228a42526627736d259089056 GIT binary patch literal 139 zcmYe~<>g`kf{elg2_X70h(HF6K#l_t7qb9~6oz01O-8?!3`HPe1o6wj#VRH(Gbc5s zJijPArl7JUBR{XQG%+tXF(xIqI3_tiH#a{oCO$qhFS8^*Uaz3?7Kcr4eoARhsvXFr I&p^xo0Nv*xl>h($ literal 0 HcmV?d00001 diff --git a/common/config/__init__.py b/common/config/__init__.py new file mode 100644 index 0000000..83fd1a2 --- /dev/null +++ b/common/config/__init__.py @@ -0,0 +1,5 @@ +""" +Author : XinYi Song +Time : 2021/11/23 9:47 +Desc: +""" diff --git a/common/config/__pycache__/__init__.cpython-39.pyc b/common/config/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..120e3d31aee7b1ed1cb36200bf166907e244503d GIT binary patch literal 212 zcmYe~<>g`k0z;2EiB3TJF^Gc<7=auIATH(r5-AK(3@MDk44O<;W?YV?B^mie3JO*V z5t(_BnF_)AdFfmsnYpP7Kwza{WME{dZ)m7*WUOFmWn#|dl3JW>#pS2Tc#AzgJ|#an zK7J)b5fji*F!9UW#VRH(Gbc5sJijPArl1mNc3x#^VqR`yOiFHXOmcp1Zhjt+%1g^k hkBN`Z%*!l^kJl@xyv1RYo1apelWGTY>1QBj0019NHPiqA literal 0 HcmV?d00001 diff --git a/common/config/__pycache__/factory.cpython-39.pyc b/common/config/__pycache__/factory.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..29ab233195d36aef96dfec44630fc5d9f0fd1b07 GIT binary patch literal 791 zcmah{OK%e~5VpPEN4JSYE4Z&XkxSd8s1*@Hq)ia5N?MVEz>2go>rJxlJ|cSqq$=W2 zent-MkvqS}S5Ey2AYS8$mfo0^XTR~<-+Z$(yjt}U*sqT>KBEYIHNj0I5IlyZR=^3w zkideZI3tW;gRRucD5Ds~^b9duP+@;U%n?ox)!dT}zcndGg$#mb@G8k)Cqb{shyG!b z#Q|6|Xsk8rtM&S7V?B7#yuazUV-+?1OUQ+4Sldrhu8sjLwhnsHC>E16mXY~*Wf*`d zU>la&1y`aHpWzSa6djRs%ZL@05hWbq&ZzLj0$|_ZArXr+>jGm`(sTP1{YB0N!f+ep 
zSGtEM)s{;5YH!5z;fN>9$)@2^@#f_2q)gJLig+Flk~9uYxH1o4?$*~Dp-80mlDv%N zTb^FN2VFI)Ng^~IO!A1irA&sySZY_rQIQM9D6lsEL%VKhFTWTvi{0sMKkc+%>~)@p zt%IGg(`{|-b=qwC-}d3ovku$ub{O5=-|E#|XL^B@^&XEGMNjP-)L z`oj#QCuPXTV{NOF%Mv}-Geyb+N9_u(5Xbrh Dd|1)^ literal 0 HcmV?d00001 diff --git a/common/config/factory.py b/common/config/factory.py new file mode 100644 index 0000000..e0df62e --- /dev/null +++ b/common/config/factory.py @@ -0,0 +1,85 @@ +""" +Author : XinYi Song +Time : 2021/11/23 9:47 +Desc: +""" +from flask import Flask +from flask_apscheduler import APScheduler + +scheduler = APScheduler() + + +def create_app(): + app = Flask(__name__) + # 配置任务,不然无法启动任务 + app.config.update( + { + "SCHEDULER_API_ENABLED": True, + "SCHEDULER_TIMEZONE": "Asia/Shanghai", + "JOBS": [ + { + "id": "my_job", # 任务ID + "func": "util:scan_file_util.scan_VJ102_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 2 * 60 # 时间间隔 + }, + { + "id": "job2", # 任务ID + "func": "util:scan_file_util.scan_VJ103_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job3", # 任务ID + "func": "util:scan_file_util.scan_GF3MDJ_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job4", # 任务ID + "func": "util:scan_file_util.scan_H08_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job5", # 任务ID + "func": "util:scan_file_util.scan_Sentinel1_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job6", # 任务ID + "func": "util:scan_file_util.scan_Sentinel2_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job7", # 任务ID + "func": "util:scan_file_util.scan_Sentinel3OL_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job8", # 任务ID + "func": "util:scan_file_util.scan_HJ1_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job9", # 任务ID + "func": 
"util:scan_file_util.scan_ZY3_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + }, + { + "id": "job10", # 任务ID + "func": "util:scan_file_util.scan_SNPP_dir", # 任务位置 + "trigger": "interval", # 触发器 + "seconds": 3 * 60 # 时间间隔 + } + ] + } + ) + scheduler.init_app(app) + scheduler.start() + return app diff --git a/common/tools/__init__.py b/common/tools/__init__.py new file mode 100644 index 0000000..4fb4384 --- /dev/null +++ b/common/tools/__init__.py @@ -0,0 +1,5 @@ +""" +Author : XinYi Song +Time : 2021/11/22 14:11 +Desc: +""" diff --git a/common/tools/__pycache__/__init__.cpython-39.pyc b/common/tools/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cc6a138ec7f1434d7afb6fe6407141730f72c6c5 GIT binary patch literal 212 zcmYe~<>g`kf*B^W6P+B*fs$M+WpfUcG|(D@5Spo1T}fkRMokKo32b{0+V9+S<c?)JbH6FYI%S?0df%tT52<&B7G3wdG$$U6&}*G-1%43tUwgqMOQYRJa{>MF`WJ}nvNb%AC1PZ9;kH4h$vYRs^G@wvUBMB!D_1+oLgzOnycZ; zO6#m2_fkKj-L$CeBuhylg0{`C_k-2dmAc+`N>142I470Ui#fkW8LsPS`+td5c=G}gM z_s{Y6!@XahPBtGyezNsq{OaCh_~f6_X#Cr7`upbO?$h!1lNkISm6X)qs-m4kc4jvdTV`Ig--k&M%XlJM*kQBA>OR_%M=FK^mMCJT1Bn ziR*o6W+r|L-=QpL(kgaRPV(f;d3s!Wx?e4l)josgho-7?aT+q0>vm;F5vEBLRm&y( u=`^LvNUju3b$Tj;3#K42`J=gBs;h$NoX;sPGjc)h8P&+N77fQezW6`0%8{i2 literal 0 HcmV?d00001 diff --git a/common/tools/dms.py b/common/tools/dms.py new file mode 100644 index 0000000..ee12ca2 --- /dev/null +++ b/common/tools/dms.py @@ -0,0 +1,40 @@ +""" +Author : XinYi Song +Time : 2021/11/5 14:12 +Desc: +""" +from util.http_util import httpUtil + + +def dms_login(): + """ + 数管系统登录 + :return: + """ + res = httpUtil(url='http://192.168.2.9:8820/api/login', + params={"userName": "client1", "password": "sxy1998"}).post_param() + return res.json()['data'] + + +def dms_task_record(token_s: str, collectionCode: str): + """ + 调用数管系统获取遥感数据归档任务的接口 + :param collectionCode: + :param token_s: + :return: + """ + res = 
httpUtil(url='http://192.168.2.9:8820/api/data-storage-task-record/get/collection-code/revision', + params={"collectionCode": collectionCode, "revision": 1}, token=token_s).get_herder() + return res.json()['data'] + + +def dms_sensing_data(token_s: str, collectionCode: str): + """ + 调用数管系统获取所有遥感数据的接口 + :param collectionCode: + :param token_s: + :return: + """ + res = httpUtil(url='http://192.168.2.9:8820/api/remote-sensing-data/get/collection-code', + params={"collectionCode": collectionCode}, token=token_s).get_herder() + return res.json()['data'] diff --git a/scan_data/GetMetaInfo.py b/scan_data/GetMetaInfo.py new file mode 100644 index 0000000..1151087 --- /dev/null +++ b/scan_data/GetMetaInfo.py @@ -0,0 +1,1554 @@ +from xml.dom import minidom +from osgeo import gdal +from osgeo import ogr +from osgeo import gdalconst +import h5py +from PIL import Image +import numpy as np +import tarfile +import zipfile +import re +import os +import io +import sys + + +def exe_path(): + """ + [获取exe目录] + Returns: + [str]: [exe目录] + """ + if hasattr(sys, 'frozen'): + # Handles PyInstaller + return os.path.dirname(sys.executable) + return os.path.dirname(os.path.realpath(__file__)) + + +os.environ['PROJ_LIB'] = exe_path() + "/PROJ" + + +def uint16to8(bands, lower_percent=0.001, higher_percent=99.999): + """ + 拉伸图像:图片16位转8位 + :param bands: 输入栅格数据 + :param lower_percent: 最低百分比 + :param higher_percent: 最高百分比 + :return: + """ + out = np.zeros_like(bands, dtype=np.uint8) + n = bands.shape[0] + for i in range(n): + a = 0 # np.min(band) + b = 255 # np.max(band) + c = np.percentile(bands[i, :, :], lower_percent) + d = np.percentile(bands[i, :, :], higher_percent) + t = a + (bands[i, :, :] - c) * (b - a) / (d - c) + t[t < a] = a + t[t > b] = b + out[i, :, :] = t + return out + + +def createXML(metadata, xlm_file): + """ + 创建xlm文件并写入字典 + :param metadata: 元数据信息 + :param xlm_file: xlm文件 + :return: + """ + # 创建一个空的文档 + document = minidom.Document() # 创建DOM文档对象 + # 创建一个根节点对象 + root = 
document.createElement('ProductMetaData') + # 设置根节点的属性 + # root.setAttribute('', '') + # 将根节点添加到文档对象中 + document.appendChild(root) + # 字典转xml + for key in metadata: + # 创建父节点 + node_name = document.createElement(key) + # 给父节点设置文本 + node_name.appendChild(document.createTextNode(str(metadata[key]))) + # 将各父节点添加到根节点 + root.appendChild(node_name) + # 写入xlm文档 + with open(xlm_file, 'w', encoding='utf-8') as f: + document.writexml(f, indent='\t', newl='\n', addindent='\t', encoding='utf-8') + f.close() + + +def GetGFPMSData(in_file, xml_path, thumbnail_ath): + """ + 获取高分 PMS卫星元数据 + :param thumbnail_ath: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + with tarfile.open(in_file, mode='r') as tar_file: + extensions = ('MSS2_thumb.jpg', 'PAN2_thumb.jpg', 'MSS2.xml', 'PAN2.xml') + file_list = [file for file in tar_file.getnames() if file.endswith(extensions)] + + # 解压多光谱缩略图 + if file_list[1].endswith('MSS2_thumb.jpg'): + tar_file.extract(file_list[1], thumbnail_ath) + ThumbnailPath_MSS = in_path + "/" + file_list[1] + ThumbnailName_MSS = file_list[1] + else: + return {"code": -1, "msg": "找不到指定文件..."} + + # 解压全色缩略图 + if file_list[3].endswith("PAN2_thumb.jpg"): + tar_file.extract(file_list[3], thumbnail_ath) + ThumbnailPath_PAN = thumbnail_ath + "/" + file_list[3] + ThumbnailName_PAN = file_list[3] + else: + return {"code": -1, "msg": "找不到指定文件..."} + + # 解压多光谱XML文件 + if file_list[0].endswith('MSS2.xml'): + # 解压XML文件 + tar_file.extract(file_list[0], xml_path) + xmlPath = xml_path + "/" + file_list[0] + xmlFileName = file_list[0] + # 获取文件流 + meta_file = tar_file.extractfile(file_list[0]) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data + StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data + EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data + + # 其他信息 
+ # WidthInPixels = dom.getElementsByTagName('WidthInPixels')[0].firstChild.data + # HeightInPixels = dom.getElementsByTagName('HeightInPixels')[0].firstChild.data + ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率 + Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段 + CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖 + + # 中心经纬度 + TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度 + TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度 + TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度 + TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度 + BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度 + BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度 + BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度 + BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度 + + # 边界几何 + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} {TopLeftLatitude}))' + + # 构建多光谱字典 + gf_mss_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": CloudPercent, + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": ImageGSD, + # "WidthInPixels": WidthInPixels, + # "HeightInPixels": HeightInPixels, + "ProjectedCoordinates": "", + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath_MSS, + "ThumbnailName": ThumbnailName_MSS, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + else: + return {"code": -1, "msg": "找不到指定文件..."} + + 
# 解压全色XML文件 + if file_list[2].endswith('PAN2.xml'): + # 解压XML文件 + tar_file.extract(file_list[2], xml_path) + xmlPath = xml_path + "/" + file_list[2] + xmlFileName = file_list[2] + + # 获取文件流 + meta_file = tar_file.extractfile(file_list[2]) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data + StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data + EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data + + # 其他信息 + ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率 + # WidthInPixels = dom.getElementsByTagName('WidthInPixels')[0].firstChild.data + # HeightInPixels = dom.getElementsByTagName('HeightInPixels')[0].firstChild.data + Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段 + CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖 + + # 中心经纬度 + TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度 + TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度 + TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度 + TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度 + BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度 + BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度 + BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度 + BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度 + + # 边界几何 + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} 
{TopLeftLatitude}))' + + # 构建全色字典 + gf_pan_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": CloudPercent, + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": ImageGSD, + # "WidthInPixels": WidthInPixels, + # "HeightInPixels": HeightInPixels, + "ProjectedCoordinates": "", + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath_PAN, + "ThumbnailName": ThumbnailName_PAN, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + else: + return {"code": -1, "msg": "找不到指定文件..."} + # 关闭压缩文件 + tar_file.close() + if (not gf_mss_dict) or (not gf_pan_dict): + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return gf_mss_dict, gf_pan_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetGF3MDJData(in_file, xml_path, thumbnail_path): + """ + 获取高分3号MDJ(GF-3 MDJ)卫星元数据 + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + with tarfile.open(in_file, mode='r') as tar_file: + extensions = ('.thumb.jpg', 'meta.xml') + file_list = [file for file in tar_file.getnames() if file.endswith(extensions)] + # 解压缩略图 + if file_list[0].endswith('.thumb.jpg'): + tar_file.extract(file_list[0], thumbnail_path) + ThumbnailPath = thumbnail_path + "/" + file_list[0] + ThumbnailName = file_list[0] + else: + return {"code": -1, "msg": "找不到指定文件..."} + + # 解压XML文件 + if file_list[1].endswith('meta.xml'): + tar_file.extract(file_list[1], xml_path) + xmlPath = xml_path + "/" + file_list[1] + xmlFileName = file_list[1] + # 获取文件流 + meta_file = tar_file.extractfile(file_list[1]) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + CollectionCode = "GF3_MDJ" + ProduceTime = dom.getElementsByTagName('productGentime')[0].firstChild.data + StartTime = 
dom.getElementsByTagName("imagingTime")[0].getElementsByTagName("start")[0].firstChild.data + EndTime = dom.getElementsByTagName("imagingTime")[0].getElementsByTagName("end")[0].firstChild.data + + # 其他信息 + ImageGSD = dom.getElementsByTagName('NominalResolution')[0].firstChild.data + # EarthModel = dom.getElementsByTagName('EarthModel')[0].firstChild.data + ProjectedCoordinates = dom.getElementsByTagName('ProjectModel')[0].firstChild.data + Bands = "1,2" + + # 经纬度 + TopLeftLatitude = dom.getElementsByTagName("topLeft")[0].getElementsByTagName("latitude")[ + 0].firstChild.data + TopLeftLongitude = dom.getElementsByTagName("topLeft")[0].getElementsByTagName("longitude")[ + 0].firstChild.data + TopRightLatitude = dom.getElementsByTagName("topRight")[0].getElementsByTagName("latitude")[ + 0].firstChild.data + TopRightLongitude = dom.getElementsByTagName("topRight")[0].getElementsByTagName("longitude")[ + 0].firstChild.data + BottomLeftLatitude = dom.getElementsByTagName("bottomLeft")[0].getElementsByTagName("latitude")[ + 0].firstChild.data + BottomLeftLongitude = dom.getElementsByTagName("bottomLeft")[0].getElementsByTagName("longitude")[ + 0].firstChild.data + BottomRightLatitude = dom.getElementsByTagName("bottomRight")[0].getElementsByTagName("latitude")[ + 0].firstChild.data + BottomRightLongitude = dom.getElementsByTagName("bottomRight")[0].getElementsByTagName("longitude")[ + 0].firstChild.data + # 边界几何 + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} {TopLeftLatitude}))' + + # 构建字典 + gf3_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": "", + "boundaryGeomStr": boundaryGeomStr, + "Bands": Bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": ProjectedCoordinates, + 'CollectionCode': CollectionCode, + 
"ThumbnailPath": ThumbnailPath, + "ThumbnailName": ThumbnailName, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + + else: + return {"code": -1, "msg": "找不到指定文件..."} + + # 判断字典是否为空 + if not gf3_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return gf3_dict + except Exception as e: + return {"code": -1, "msg": str(e)} + + +def GetGF4PMIData(in_file, xml_path, thumbnail_path): + """ + 获取高分4号PMI(GF-4 PMI)卫星元数据 + PMS(可见光、近红外 5个波段 50m)、IRS(中波红外 1个波段 400m) + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + with tarfile.open(in_file, mode='r') as tar_file: + extensions = ('_thumb.jpg', '.xml') + file_list = [file for file in tar_file.getnames() if file.endswith(extensions)] + + # 解压PMS缩略图 + if file_list[2].endswith('_thumb.jpg') and file_list[2].startswith('GF4_PMS_'): + tar_file.extract(file_list[2], thumbnail_path) + ThumbnailPath_PMS = thumbnail_path + "/" + file_list[2] + ThumbnailName_PMS = file_list[2] + else: + return {"code": -1, "msg": "找不到指定文件..."} + + # 解压IRS缩略图 + if file_list[0].endswith('_thumb.jpg') and file_list[0].startswith('GF4_IRS_'): + + tar_file.extract(file_list[0], thumbnail_path) + ThumbnailPath_IRS = thumbnail_path + "/" + file_list[0] + ThumbnailName_IRS = file_list[0] + else: + return {"code": -1, "msg": "找不到指定文件..."} + + # 解压PMS XML文件 + if file_list[3].endswith('.xml') and file_list[3].startswith('GF4_PMS_'): + # 解压XML文件 + tar_file.extract(file_list[3], xml_path) + xmlPath = xml_path + "/" + file_list[3] + xmlFileName = file_list[3] + + # 获取文件流 + meta_file = tar_file.extractfile(file_list[3]) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data + StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data + EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data + + # 
Other metadata
+                CollectionCode = "GF4_PMS"
+                ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data  # resolution
+                # ProjectedCoordinates = dom.getElementsByTagName('MapProjection')[0].firstChild.data  # projected CRS
+                ProjectedCoordinates = ""  # projected coordinate system
+                Bands = dom.getElementsByTagName('Bands')[0].firstChild.data  # bands
+                CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data  # cloud coverage
+
+                # Corner latitude/longitude
+                TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data  # upper-left latitude
+                TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data  # upper-left longitude
+                TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data  # upper-right latitude
+                TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data  # upper-right longitude
+                BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data  # lower-right latitude
+                BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data  # lower-right longitude
+                BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data  # lower-left latitude
+                BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data  # lower-left longitude
+
+                # WKT polygon footprint (closed ring, lon-lat order)
+                boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+                                  f'{TopRightLongitude} {TopRightLatitude},' \
+                                  f'{BottomRightLongitude} {BottomRightLatitude},' \
+                                  f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+                                  f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+                # Build the visible/near-infrared (PMS) dict
+                gf4_pms_dict = {"ProduceTime": ProduceTime,
+                                "StartTime": StartTime,
+                                "EndTime": EndTime,
+                                "CloudPercent": CloudPercent,
+                                "boundaryGeomStr": boundaryGeomStr,
+                                "Bands": Bands,
+                                "ImageGSD": ImageGSD,
+                                "ProjectedCoordinates": ProjectedCoordinates,
+                                'CollectionCode': CollectionCode,
+                                "ThumbnailName": ThumbnailName_PMS,
+                                "ThumbnailPath": ThumbnailPath_PMS,
+                                "xmlFileName": xmlFileName,
+                                "xmlPath": xmlPath,
+                                "DirectoryDepth": "month"}
+            else:
+                return {"code": -1, "msg": "找不到指定文件..."}
+
+            # Extract the IRS XML file
+            if file_list[1].endswith('.xml') and file_list[1].startswith('GF4_IRS_'):
+                # Extract the XML file itself
+                tar_file.extract(file_list[1], xml_path)
+                xmlPath = xml_path + "/" + file_list[1]
+                xmlFileName = file_list[1]
+
+                # Get a file stream for the archive member
+                meta_file = tar_file.extractfile(file_list[1])
+                meta_content = meta_file.read()
+                dom = minidom.parse(io.StringIO(meta_content.decode("utf-8")))
+
+                # Product dates
+                ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data
+                StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data
+                EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data
+
+                # Other metadata
+                CollectionCode = "GF4_IRS"
+                ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data  # resolution
+                # ProjectedCoordinates = dom.getElementsByTagName('MapProjection')[0].firstChild.data  # projected CRS
+                ProjectedCoordinates = ""  # projected coordinate system
+                Bands = dom.getElementsByTagName('Bands')[0].firstChild.data  # bands
+                CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data  # cloud coverage
+
+                # Corner latitude/longitude
+                TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data  # upper-left latitude
+                TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data  # upper-left longitude
+                TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data  # upper-right latitude
+                TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data  # upper-right longitude
+                BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data  # lower-right latitude
+                BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data  # lower-right longitude
+                BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data  # lower-left latitude
+                BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data  # lower-left longitude
+
+                # WKT polygon footprint (closed ring, lon-lat order)
+                boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+                                  f'{TopRightLongitude} {TopRightLatitude},' \
+                                  f'{BottomRightLongitude} {BottomRightLatitude},' \
+                                  f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+                                  f'{TopLeftLongitude} 
{TopLeftLatitude}))' + # 构建中红外(IRS)字典 + gf4_irs_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": CloudPercent, + "boundaryGeomStr": boundaryGeomStr, + "Bands": Bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": ProjectedCoordinates, + 'CollectionCode': CollectionCode, + "ThumbnailName": ThumbnailName_IRS, + "ThumbnailPath": ThumbnailPath_IRS, + "xmlFileName": xmlFileName, + "xmlPath": xmlPath, + "DirectoryDepth": "month"} + else: + return {"code": -1, "msg": "找不到指定文件..."} + + # 关闭压缩文件 + tar_file.close() + # 判断字典是否为空 + if (not gf4_pms_dict) or (not gf4_irs_dict): + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return gf4_pms_dict, gf4_irs_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetH08Data(in_file, xml_path, thumbnail_path): + """ + 获取葵花8卫星元数据 + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg" + ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName) + + # 其他信息 + with h5py.File(in_file, mode='r') as f: + start_time = f['start_time'][0] + end_time = f['end_time'][0] + band_id = f['band_id'][:] + bands = ','.join(str(i) for i in band_id) + ImageGSD = '1000, 500, 2000' + + # 生成缩略图 + gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES") + in_datasets = gdal.Open(in_file) + meta_data = in_datasets.GetMetadata() + # 取出子数据集 + datasets = in_datasets.GetSubDatasets() + red_data = gdal.Open(datasets[7][0]).ReadAsArray() + gre_data = gdal.Open(datasets[6][0]).ReadAsArray() + blu_data = gdal.Open(datasets[5][0]).ReadAsArray() + img_data = np.array([red_data, gre_data, blu_data]) + img_data = uint16to8(img_data) + # Array转Image + img_data2 = np.transpose(img_data, (1, 2, 0)) + img_data2 = img_data2[:, :, ::-1] + img = Image.fromarray(img_data2) + # 压缩图片大小 + if img_data.shape[1] > img_data.shape[2]: + width = 512 + height 
= int(width / img_data.shape[1] * img_data.shape[2]) + else: + height = 512 + width = int(height / img_data.shape[1] * img_data.shape[2]) + img.thumbnail((width, height)) + img.save(ThumbnailPath, "PNG") + + # 释放内存 + del in_datasets + del img_data + del img_data2 + del img + + # 生成XML文件 + xmlFileName = os.path.splitext(basename)[0] + ".xml" + xmlPath = os.path.join(xml_path, xmlFileName) + createXML(meta_data, xmlPath) + + # 产品日期 + date_created = meta_data['date_created'] + # band_number = meta_data['band_number'] + + # 经纬度 + upper_left_latitude = meta_data['upper_left_latitude'] + upper_left_longitude = int(meta_data['upper_left_longitude']) - 180 + upper_right_latitude = meta_data['upper_left_latitude'] + upper_right_longitude = 200 - 180 + lower_right_latitude = -60 + lower_right_longitude = 200 - 180 + lower_left_latitude = -60 + lower_left_longitude = str(int(meta_data['upper_left_longitude']) - 180) + + boundaryGeomStr = f'POLYGON(({upper_left_longitude} {upper_left_latitude},' \ + f'{upper_right_longitude} {upper_right_latitude},' \ + f'{lower_right_longitude} {lower_right_latitude},' \ + f'{lower_left_longitude} {lower_left_latitude},' \ + f'{upper_left_longitude} {upper_left_latitude}))' + + # 构建字典 + himawari8_dict = {"ProduceTime": date_created, + "StartTime": "", + "EndTime": "", + "CloudPercent": "", + "boundaryGeomStr": boundaryGeomStr, + "bands": bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": "", + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath, + "ThumbnailName": ThumbnailName, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "day"} + + # 判断字典是否为空 + if not himawari8_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return himawari8_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetJPSSData(in_file, xml_path, thumbnail_path): + """ + 获取联合极轨卫星系统(JPSS-1)元数据:NOAA-20(Joint Polar Satellite System spacecraft) + :param xml_path: + :param thumbnail_path: + :param 
in_file: path of the input granule (opened via gdal.Open)
+    :return: 
metadata dict, or {"code": -1, "msg": ...} on failure
+    """
+    try:
+        # Generate the thumbnail
+        in_path, basename = os.path.split(in_file)
+        ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+        ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+        gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
+        in_datasets = gdal.Open(in_file)
+        meta_data = in_datasets.GetMetadata()
+        # Pull out the subdatasets
+        datasets = in_datasets.GetSubDatasets()
+        red_data = gdal.Open(datasets[0][0]).ReadAsArray()
+        nir_data = gdal.Open(datasets[3][0]).ReadAsArray()
+        swir_data = gdal.Open(datasets[9][0]).ReadAsArray()
+        img_data = np.array([red_data, nir_data, swir_data])
+        img_data = uint16to8(img_data)
+        # Array -> PIL Image
+        img_data2 = np.transpose(img_data, (1, 2, 0))
+        img_data2 = img_data2[:, :, ::-1]
+        img = Image.fromarray(img_data2)
+        # Shrink the image (longest side capped at 512, aspect preserved)
+        if img_data.shape[1] > img_data.shape[2]:
+            width = 512
+            height = int(width / img_data.shape[1] * img_data.shape[2])
+        else:
+            height = 512
+            width = int(height / img_data.shape[1] * img_data.shape[2])
+        img.thumbnail((width, height))
+        # NOTE(review): PNG-encoded bytes are written to a ".jpg"-named file —
+        # confirm this mismatch is intended.
+        img.save(ThumbnailPath, "PNG")
+
+        # Free memory
+        del in_datasets
+        del img_data
+        del img_data2
+        del img
+
+        # Write the XML file
+        xmlFileName = os.path.splitext(basename)[0] + ".xml"
+        xmlPath = os.path.join(xml_path, xmlFileName)
+        createXML(meta_data, xmlPath)
+
+        # Product dates
+        ProductionTime = meta_data['ProductionTime']
+        StartTime = meta_data['StartTime']
+        EndTime = meta_data['EndTime']
+
+        # Other metadata
+        ImageGSD = str(meta_data['LongName']).split(" ")[-1]
+        Bands = str(meta_data['title']).split(" ")[1]
+
+        # Corner latitude/longitude, derived from the bounding-box coordinates
+        # (so left/right corners share the same latitude values)
+        productUpperLeftLat = meta_data['NorthBoundingCoordinate']  # upper-left latitude
+        productUpperLeftLong = meta_data['WestBoundingCoordinate']  # upper-left longitude
+        productUpperRightLat = meta_data['NorthBoundingCoordinate']  # upper-right latitude
+        productUpperRightLong = meta_data['EastBoundingCoordinate']  # upper-right longitude
+        productLowerLeftLat = meta_data['SouthBoundingCoordinate']  # lower-left latitude
+        productLowerLeftLong = meta_data['WestBoundingCoordinate']  # lower-left longitude
+        productLowerRightLat = 
meta_data['SouthBoundingCoordinate'] # 右下纬度 + productLowerRightLong = meta_data['EastBoundingCoordinate'] # 右下纬度 + + # 边界几何 + boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \ + f'{productUpperRightLong} {productUpperRightLat},' \ + f'{productLowerRightLong} {productLowerRightLat},' \ + f'{productLowerLeftLong} {productLowerLeftLat},' \ + f'{productUpperLeftLong} {productUpperLeftLat}))' + + # 构建字典 + jpss_dict = {"ProduceTime": ProductionTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": "", + # "TopLeftLatitude": productUpperLeftLat, + # "TopLeftLongitude": productUpperLeftLong, + # "TopRightLatitude": productUpperRightLat, + # "TopRightLongitude": productUpperRightLong, + # "BottomLeftLatitude": productLowerLeftLat, + # "BottomLeftLongitude": productLowerLeftLong, + # "BottomRightLatitude": productLowerRightLat, + # "BottomRightLongitude": productLowerRightLong, + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": "", + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath, + "ThumbnailName": ThumbnailName, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "day"} + + # 判断字典是否为空 + if not jpss_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + print(jpss_dict) + return jpss_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetSNPPData(in_file, xml_path, thumbnail_path): + """ + 获取Suomi National Polar-orbiting Partnership(SNPP)元数据 + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + # 生成缩略图 + ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg" + ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName) + + gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES") + in_datasets = gdal.Open(in_file) + meta_data = in_datasets.GetMetadata() + + # 取出子数据集 + datasets = in_datasets.GetSubDatasets() + red_data = 
gdal.Open(datasets[0][0]).ReadAsArray()
+        gre_data = gdal.Open(datasets[3][0]).ReadAsArray()
+        blu_data = gdal.Open(datasets[9][0]).ReadAsArray()
+        img_data = np.array([red_data, gre_data, blu_data])
+        img_data = uint16to8(img_data)
+        # Array -> PIL Image
+        img_data2 = np.transpose(img_data, (1, 2, 0))
+        img_data2 = img_data2[:, :, ::-1]
+        img = Image.fromarray(img_data2)
+        # Shrink the image (longest side capped at 512, aspect preserved)
+        if img_data.shape[1] > img_data.shape[2]:
+            width = 512
+            height = int(width / img_data.shape[1] * img_data.shape[2])
+        else:
+            height = 512
+            width = int(height / img_data.shape[1] * img_data.shape[2])
+        img.thumbnail((width, height))
+        img.save(ThumbnailPath, "PNG")
+
+        # Free memory
+        del in_datasets
+        del img_data
+        del img_data2
+        del img
+
+        # Write the XML file
+        xmlFileName = os.path.splitext(basename)[0] + ".xml"
+        xmlPath = os.path.join(xml_path, xmlFileName)
+        createXML(meta_data, xmlPath)
+
+        # Product dates
+        ProductionTime = meta_data['ProductionTime']
+        StartTime = meta_data['StartTime']
+        EndTime = meta_data['EndTime']
+
+        # Other metadata
+        ImageGSD = str(meta_data['LongName']).split(" ")[-1][:-1]
+        Bands = str(meta_data['title'])
+
+        # Corner latitude/longitude, derived from the bounding-box coordinates
+        productUpperLeftLat = meta_data['NorthBoundingCoordinate']  # upper-left latitude
+        productUpperLeftLong = meta_data['WestBoundingCoordinate']  # upper-left longitude
+        productUpperRightLat = meta_data['NorthBoundingCoordinate']  # upper-right latitude
+        productUpperRightLong = meta_data['EastBoundingCoordinate']  # upper-right longitude
+        productLowerLeftLat = meta_data['SouthBoundingCoordinate']  # lower-left latitude
+        productLowerLeftLong = meta_data['WestBoundingCoordinate']  # lower-left longitude
+        productLowerRightLat = meta_data['SouthBoundingCoordinate']  # lower-right latitude
+        productLowerRightLong = meta_data['EastBoundingCoordinate']  # lower-right longitude
+
+        # WKT polygon footprint (closed ring, lon-lat order)
+        boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \
+                          f'{productUpperRightLong} {productUpperRightLat},' \
+                          f'{productLowerRightLong} {productLowerRightLat},' \
+                          f'{productLowerLeftLong} {productLowerLeftLat},' \
+                          f'{productUpperLeftLong} {productUpperLeftLat}))'
+
+        # Build the dict
+        # NOTE(review): the first key here is "ProductionTime", while sibling
+        # parsers in this module emit "ProduceTime" — confirm downstream
+        # consumers before renaming.
+        snpp_dict = 
{"ProductionTime": ProductionTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": "", + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": "", + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath, + "ThumbnailName": ThumbnailName, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "day"} + + # 判断字典是否为空 + if not snpp_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return snpp_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetSentinel1Data(in_file, xml_path, thumbnail_path): + """ + 获取哨兵1卫星元数据 + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + with zipfile.ZipFile(in_file, mode='r') as zip_file: + xmlFileName = os.path.splitext(basename)[0] + ".xml" + xmlPath = os.path.join(xml_path, xmlFileName) + ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg" + ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName) + for member in zip_file.namelist(): + if re.match(r'[0-9a-zA-Z\_]+.SAFE/annotation/s1a-iw-grd-vv[0-9a-z\-]+.xml', member): + # 输出xml文件 + meta_data = zip_file.read(member) + with open(xmlPath, "wb") as fout: + fout.write(meta_data) + # 产品日期 + meta_content = zip_file.open(member) + dom = minidom.parse(meta_content) + ProduceTime = dom.getElementsByTagName('qualityInformation')[ + 0].getElementsByTagName('qualityDataList')[ + 0].getElementsByTagName('qualityData')[ + 0].getElementsByTagName('azimuthTime')[ + 0].firstChild.data + StartTime = dom.getElementsByTagName('adsHeader')[0].getElementsByTagName('startTime')[ + 0].firstChild.data + StopTime = dom.getElementsByTagName('adsHeader')[0].getElementsByTagName('stopTime')[ + 0].firstChild.data + elif re.match(r'[0-9a-zA-Z\_]+.SAFE/preview/map-overlay.kml', member): + # 读取其他信息 + meta_content = zip_file.open(member) + dom = minidom.parse(meta_content) + coordinates = 
dom.getElementsByTagName('coordinates')[0].firstChild.data + + # 经纬度 + lon_lat = re.split(r'\s', coordinates) + TopLeftLatitude = re.split(r'\,', lon_lat[0])[1] # 左上纬度 + TopLeftLongitude = re.split(r'\,', lon_lat[0])[0] # 左上经度 + TopRightLatitude = re.split(r'\,', lon_lat[1])[1] # 右上纬度 + TopRightLongitude = re.split(r'\,', lon_lat[1])[0] # 右上经度 + BottomRightLatitude = re.split(r'\,', lon_lat[2])[1] # 右下纬度 + BottomRightLongitude = re.split(r'\,', lon_lat[2])[0] # 右下经度 + BottomLeftLatitude = re.split(r'\,', lon_lat[3])[1] # 左下纬度 + BottomLeftLongitude = re.split(r'\,', lon_lat[3])[0] # 左下经度 + + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} {TopLeftLatitude}))' + + elif re.match(r'[0-9a-zA-Z\_]+.SAFE/preview/quick-look.png', member): + # 输出缩略图 + thumb_data = zip_file.read(member) + with open(ThumbnailPath, "wb") as fout: + fout.write(thumb_data) + else: + continue + # 生成字典 + S1_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "StopTime": StopTime, + "CloudPercent": "", + "boundaryGeomStr": boundaryGeomStr, + "bands": "Amplitude_VH,Intensity_VH,Amplitude_VV,Intensity_VV", + # "NumberBands": "", + "ImageGSD": "10", + "ProjectedCoordinates": '', + "CollectionCode": '', + "ThumbnailName": ThumbnailName, + "ThumbnailPath": ThumbnailPath, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + zip_file.close() + if not S1_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return S1_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetSentinel2Data(in_file, xml_path, thumbnail_path): + """ + 获取哨兵2卫星元数据 + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + with zipfile.ZipFile(in_file, 'r', zipfile.ZIP_DEFLATED) 
as zip_file:
+            extensions = ('_B02_60m.jp2', '_B03_60m.jp2', '_B04_60m.jp2', '.SAFE/MTD_MSIL2A.xml')
+            file_list = [file for file in zip_file.namelist() if file.endswith(extensions)]
+            file_list.sort()
+
+            # Generate the thumbnail
+            ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+            ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+            # bgr_data = ['/vsizip/%s/%s' % (in_file, file) for file in file_list[:3]]
+            # out_vrt = '/vsimem/stacked.vrt'  # virtual output path for band stacking
+            # # Merge several source files into one VRT (virtual gdal dataset)
+            # out_dataset = gdal.BuildVRT(out_vrt, bgr_data, separate=True)
+            # # Convert the VRT into the target image format
+            # gdal.Translate(ThumbnailPath,
+            #                out_dataset,
+            #                format='JPEG',
+            #                outputType=gdal.GDT_Byte,
+            #                widthPct=10,
+            #                heightPct=10,
+            #                creationOptions=["TILED=YES", "COMPRESS=LZW"])
+            # # Free memory
+            # # gdal.GetDriverByName("VRT").Delete('/vsimem/stacked.vrt')
+            # gdal.Unlink('/vsimem/stacked.vrt')
+            # del out_dataset
+
+            rgb_list = []
+            for file in file_list[:3]:
+                sub_dataset = gdal.Open('/vsizip/%s/%s' % (in_file, file))
+                sub_array = sub_dataset.ReadAsArray()
+                rgb_list.append(sub_array)
+            img_data = np.array([rgb_list[2], rgb_list[1], rgb_list[0]])
+            img_data = uint16to8(img_data)
+
+            # Array -> PIL Image
+            img_data2 = np.transpose(img_data, (1, 2, 0))
+            img_data2 = img_data2[:, :, ::-1]
+            img = Image.fromarray(img_data2)
+            # Shrink the image (longest side capped at 512, aspect preserved)
+            if img_data.shape[1] > img_data.shape[2]:
+                width = 512
+                height = int(width / img_data.shape[1] * img_data.shape[2])
+            else:
+                height = 512
+                width = int(height / img_data.shape[1] * img_data.shape[2])
+            img.thumbnail((width, height))
+            img.save(ThumbnailPath, "PNG")
+
+            # Free memory
+            del rgb_list
+            del img_data
+            del img_data2
+            del img
+
+            # Extract the multispectral XML file
+            if file_list[3].endswith('.SAFE/MTD_MSIL2A.xml'):
+                # Write the XML file
+                xmlFileName = os.path.splitext(basename)[0] + ".xml"
+                xmlPath = os.path.join(xml_path, xmlFileName)
+                meta_data = zip_file.read(file_list[3])
+                with open(xmlPath, "wb") as fout:
+                    fout.write(meta_data)
+
+                # Read the remaining metadata
+                meta_content = zip_file.open(file_list[3])
+                dom = minidom.parse(meta_content)
+                cloud_percent = dom.getElementsByTagName('n1:Quality_Indicators_Info')[
+                    0].getElementsByTagName('Cloud_Coverage_Assessment')[0].firstChild.data
+                ImageGSD = '10, 20, 60'
+                ProjectedCoordinates = dom.getElementsByTagName('n1:Geometric_Info')[
+                    0].getElementsByTagName('Coordinate_Reference_System')[
+                    0].getElementsByTagName('GEO_TABLES')[0].firstChild.data
+
+                # Product dates
+                ProduceTime = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+                    0].getElementsByTagName('GENERATION_TIME')[0].firstChild.data
+                StartTime = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+                    0].getElementsByTagName('PRODUCT_START_TIME')[0].firstChild.data
+                StopTime = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+                    0].getElementsByTagName('PRODUCT_STOP_TIME')[0].firstChild.data
+
+                # Latitude/longitude: footprint pos-list alternates lat/lon values
+                lon_lat = dom.getElementsByTagName('n1:Geometric_Info')[0].getElementsByTagName('Product_Footprint')[
+                    0].getElementsByTagName('Product_Footprint')[0].getElementsByTagName('Global_Footprint')[
+                    0].getElementsByTagName('EXT_POS_LIST')[0].firstChild.data
+                TopLeftLatitude = re.split(r'\s', lon_lat)[0]  # upper-left latitude
+                TopLeftLongitude = re.split(r'\s', lon_lat)[1]  # upper-left longitude
+                TopRightLatitude = re.split(r'\s', lon_lat)[2]  # upper-right latitude
+                TopRightLongitude = re.split(r'\s', lon_lat)[3]  # upper-right longitude
+                BottomRightLatitude = re.split(r'\s', lon_lat)[4]  # lower-right latitude
+                BottomRightLongitude = re.split(r'\s', lon_lat)[5]  # lower-right longitude
+                BottomLeftLatitude = re.split(r'\s', lon_lat)[6]  # lower-left latitude
+                BottomLeftLongitude = re.split(r'\s', lon_lat)[7]  # lower-left longitude
+
+                # WKT polygon footprint (closed ring, lon-lat order)
+                boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+                                  f'{TopRightLongitude} {TopRightLatitude},' \
+                                  f'{BottomRightLongitude} {BottomRightLatitude},' \
+                                  f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+                                  f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+                # Build the dict
+                S2_dict = {"ProduceTime": ProduceTime,
+                           "StartTime": 
StartTime, + "StopTime": StopTime, + "CloudPercent": cloud_percent, + "boundaryGeomStr": boundaryGeomStr, + "bands": "1,2,3,4,5,6,7,8,9,10,11,12", + # "NumberBands": '12', + "ImageGSD": ImageGSD, + "ProjectedCoordinates": ProjectedCoordinates, + "CollectionCode": '', + "ThumbnailName": ThumbnailName, + "ThumbnailPath": ThumbnailPath, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + zip_file.close() + if not S2_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return S2_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetSentinel3OLData(in_file, xml_path, thumbnail_path): + """ + 获取哨兵3 OLCI(海陆色度计)卫星元数据(有待修改) + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + in_path, basename = os.path.split(in_file) + with zipfile.ZipFile(in_file, 'r', zipfile.ZIP_DEFLATED) as zip_file: + extensions = ('Oa03_radiance.nc', 'Oa05_radiance.nc', 'Oa08_radiance.nc', 'xfdumanifest.xml') + file_list = [file for file in zip_file.namelist() if file.endswith(extensions)] + + # 生成缩略图 + ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg" + ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName) + rgb_list = [] + for file in file_list[:3]: + sub_dataset = gdal.Open('/vsizip/%s/%s' % (in_file, file)) + sub_array = sub_dataset.ReadAsArray() + rgb_list.append(sub_array) + img_data = np.array([rgb_list[2], rgb_list[1], rgb_list[0]]) + img_data = uint16to8(img_data) + + # Array转Image + img_data2 = np.transpose(img_data, (1, 2, 0)) + img_data2 = img_data2[:, :, ::-1] + img = Image.fromarray(img_data2) + # 压缩图片大小 + if img_data.shape[1] > img_data.shape[2]: + width = 512 + height = int(width / img_data.shape[1] * img_data.shape[2]) + else: + height = 512 + width = int(height / img_data.shape[1] * img_data.shape[2]) + img.thumbnail((width, height)) + img.save(ThumbnailPath, "PNG") + + # 释放内存 + del rgb_list + del img_data + del img_data2 + del img + + # 解压XML文件 + if 
file_list[3].endswith('xfdumanifest.xml'):
+                # Write the XML file
+                xmlFileName = os.path.splitext(basename)[0] + ".xml"
+                xmlPath = os.path.join(xml_path, xmlFileName)
+                meta_data = zip_file.read(file_list[3])
+                with open(xmlPath, "wb") as fout:
+                    fout.write(meta_data)
+
+                # Read the remaining metadata
+                CollectionCode = "Sentinel3_OLCI_L1"
+                meta_content = zip_file.open(file_list[3])
+                dom = minidom.parse(meta_content)
+                ProjectedCoordinates = ""
+                CloudPercent = ""
+
+                # Product dates
+                ProduceTime = dom.getElementsByTagName('sentinel3:creationTime')[0].firstChild.data
+                StartTime = dom.getElementsByTagName('sentinel3:receivingStartTime')[0].firstChild.data
+                StopTime = dom.getElementsByTagName('sentinel3:receivingStopTime')[0].firstChild.data
+
+                # Latitude/longitude
+                # NOTE(review): corners are picked from 'sentinel-safe:x'/'y'
+                # elements at indices 0 and 2 only, so opposite corners share
+                # values — verify against a real manifest before using the
+                # footprint (the WKT construction below is commented out).
+                TopLeftLatitude = dom.getElementsByTagName('sentinel-safe:y')[0].firstChild.data  # upper-left latitude
+                TopLeftLongitude = dom.getElementsByTagName('sentinel-safe:x')[0].firstChild.data  # upper-left longitude
+                TopRightLatitude = dom.getElementsByTagName('sentinel-safe:y')[0].firstChild.data  # upper-right latitude
+                TopRightLongitude = dom.getElementsByTagName('sentinel-safe:x')[2].firstChild.data  # upper-right longitude
+                BottomRightLatitude = dom.getElementsByTagName('sentinel-safe:y')[2].firstChild.data  # lower-right latitude
+                BottomRightLongitude = dom.getElementsByTagName('sentinel-safe:x')[2].firstChild.data  # lower-right longitude
+                BottomLeftLatitude = dom.getElementsByTagName('sentinel-safe:y')[2].firstChild.data  # lower-left latitude
+                BottomLeftLongitude = dom.getElementsByTagName('sentinel-safe:x')[0].firstChild.data  # lower-left longitude
+
+                # boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+                #                   f'{TopRightLongitude} {TopRightLatitude},' \
+                #                   f'{BottomRightLongitude} {BottomRightLatitude},' \
+                #                   f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+                #                   f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+                # Build the dict
+                S3_dict = {"ProduceTime": ProduceTime,
+                           "StartTime": StartTime,
+                           "StopTime": StopTime,
+                           "CloudPercent": CloudPercent,
+                           "boundaryGeomStr": "",
+                           "bands": "Oa01,Oa02,Oa03,Oa04,Oa05,Oa06,Oa07,Oa08,Oa09,Oa10,Oa11,Oa12,Oa13,Oa14,Oa15,Oa16," + 
"Oa17,Oa18,Oa19,Oa20,Oa21", + # "NumberBands": '21', + "ImageGSD": "270,294", + "ProjectedCoordinates": ProjectedCoordinates, + "CollectionCode": CollectionCode, + "ThumbnailName": ThumbnailName, + "ThumbnailPath": ThumbnailPath, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + zip_file.close() + if not S3_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return S3_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetHJ1Data(in_file, xml_path, thumbnail_path): + """ + 获取环境1号(HJ-1)卫星元数据 + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + with tarfile.open(in_file, mode='r') as tar_file: + in_path, basename = os.path.split(in_file) + for member in tar_file.getnames(): + if member.endswith("THUMB.JPG"): + # 解压缩略图 + tar_file.extract(member, thumbnail_path) + ThumbnailPath = thumbnail_path + "/" + member + ThumbnailName = member.split('/')[1] + elif member.endswith(".XML"): + # 解压XML文件 + tar_file.extract(member, xml_path) + xmlPath = xml_path + "/" + member + xmlFileName = member.split('/')[1] + + # 获取文件流 + meta_file = tar_file.extractfile(member) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + productDate = dom.getElementsByTagName('productDate')[0].firstChild.data + imagingStartTime = dom.getElementsByTagName('imagingStartTime')[0].firstChild.data # 开始时间 + imagingStopTime = dom.getElementsByTagName('imagingStopTime')[0].firstChild.data # 结束时间 + + # 其他信息 + pixelSpacing = dom.getElementsByTagName('pixelSpacing')[0].firstChild.data # 分辨率 + # earthModel = dom.getElementsByTagName('earthModel')[0].firstChild.data # 投影 + mapProjection = dom.getElementsByTagName('mapProjection')[0].firstChild.data # 投影坐标系 + # zone = dom.getElementsByTagName('zone')[0].firstChild.data # 带号 + bands = dom.getElementsByTagName('bands')[0].firstChild.data # 波段 + + # 中心经纬度 + TopLeftLatitude = 
dom.getElementsByTagName('productUpperLeftLat')[0].firstChild.data # 左上纬度 + TopLeftLongitude = dom.getElementsByTagName('productUpperLeftLong')[0].firstChild.data # 左上经度 + TopRightLatitude = dom.getElementsByTagName('productUpperRightLat')[0].firstChild.data # 右上纬度 + TopRightLongitude = dom.getElementsByTagName('productUpperRightLong')[0].firstChild.data # 右上经度 + BottomLeftLatitude = dom.getElementsByTagName('productLowerLeftLat')[0].firstChild.data # 左下纬度 + BottomLeftLongitude = dom.getElementsByTagName('productLowerLeftLong')[0].firstChild.data # 左下经度 + BottomRightLatitude = dom.getElementsByTagName('productLowerRightLat')[0].firstChild.data # 右下纬度 + BottomRightLongitude = dom.getElementsByTagName('productLowerRightLong')[0].firstChild.data # 右下纬度 + + # 边界几何 + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} {TopLeftLatitude}))' + else: + continue + # 构建字典 + hj1_dict = {"ProductTime": productDate, + "StartTime": imagingStartTime, + "EndTime": imagingStopTime, + "CloudPercent": "", + # "TopLeftLatitude": TopLeftLatitude, + # "TopLeftLongitude": TopLeftLongitude, + # "TopRightLatitude": TopRightLatitude, + # "TopRightLongitude": TopRightLongitude, + # "BottomLeftLatitude": BottomLeftLatitude, + # "BottomLeftLongitude": BottomLeftLongitude, + # "BottomRightLatitude": BottomRightLatitude, + # "BottomRightLongitude": BottomRightLongitude, + "boundaryGeomStr": boundaryGeomStr, + "bands": bands, + "ImageGSD": pixelSpacing, + "ProjectedCoordinates": mapProjection, + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath, + "ThumbnailName": ThumbnailName, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + # 关闭压缩文件 + tar_file.close() + # 判断字典是否为空 + if not hj1_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return hj1_dict + except Exception as 
e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetZY02CData(in_file, xml_path, thumbnail_path): + """ + 获取资源2(ZY-2)卫星元数据: + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + zy2_mux_dict, zy2_pan_dict = dict(), dict() + in_path, basename = os.path.split(in_file) + with tarfile.open(in_file, mode='r') as tar_file: + for member in tar_file.getnames(): + if member.endswith("MUX_thumb.jpg"): + # 解压多光谱缩略图 + tar_file.extract(member, thumbnail_path) + ThumbnailPath_MUX = thumbnail_path + "/" + member + ThumbnailName_MUX = member + elif member.endswith("PAN_thumb.jpg"): + # 解压全色缩略图 + tar_file.extract(member, thumbnail_path) + ThumbnailPath_PAN = thumbnail_path + "/" + member + ThumbnailName_PAN = member + + if member.endswith('MUX.xml'): + # 解压XML文件 + tar_file.extract(member, xml_path) + xmlPath = xml_path + "/" + member + xmlFileName = member + + # 获取文件流 + meta_file = tar_file.extractfile(member) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data + StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data + EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data + + # 其他信息 + ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率 + MapProjection = dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系 + EarthEllipsoid = dom.getElementsByTagName('EarthEllipsoid')[0].firstChild.data # 地理坐标系 + ZoneNo = dom.getElementsByTagName('ZoneNo')[0].firstChild.data # 投影分带带号 + Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段 + CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖 + + # 中心经纬度 + TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度 + TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度 + TopRightLatitude = 
dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度 + TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度 + BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度 + BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度 + BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度 + BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度 + + # 几何边界 + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} {TopLeftLatitude}))' + + # 构建多光谱字典 + zy2_mux_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": CloudPercent, + # "TopLeftLatitude": TopLeftLatitude, + # "TopLeftLongitude": TopLeftLongitude, + # "TopRightLatitude": TopRightLatitude, + # "TopRightLongitude": TopRightLongitude, + # "BottomRightLatitude": BottomRightLatitude, + # "BottomRightLongitude": BottomRightLongitude, + # "BottomLeftLatitude": BottomLeftLatitude, + # "BottomLeftLongitude": BottomLeftLongitude, + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": MapProjection, + 'CollectionCode': "", + "ThumbnailPath": ThumbnailPath_MUX, + "ThumbnailName": ThumbnailName_MUX, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "day"} + elif member.endswith('PAN.xml'): + # 解压XML文件 + tar_file.extract(member, xml_path) + xmlPath = xml_path + "/" + member + xmlFileName = member + + # 获取文件流 + meta_file = tar_file.extractfile(member) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + ProduceTime = 
dom.getElementsByTagName('ProduceTime')[0].firstChild.data + StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data + EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data + + # 其他信息 + ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率 + MapProjection = dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系 + EarthEllipsoid = dom.getElementsByTagName('EarthEllipsoid')[0].firstChild.data # 地理坐标系 + ZoneNo = dom.getElementsByTagName('ZoneNo')[0].firstChild.data # 投影分带带号 + Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段 + CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖 + + # 中心经纬度 + TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度 + TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度 + TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度 + TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度 + BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度 + BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度 + BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度 + BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度 + + # 几何边界 + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} {TopLeftLatitude}))' + + # 构建全色字典 + zy2_pan_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": CloudPercent, + # "TopLeftLatitude": TopLeftLatitude, + # "TopLeftLongitude": TopLeftLongitude, + # "TopRightLatitude": TopRightLatitude, + 
# "TopRightLongitude": TopRightLongitude, + # "BottomRightLatitude": BottomRightLatitude, + # "BottomRightLongitude": BottomRightLongitude, + # "BottomLeftLatitude": BottomLeftLatitude, + # "BottomLeftLongitude": BottomLeftLongitude, + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": MapProjection, + 'CollectionCode': "", + "ThumbnailPath": ThumbnailPath_PAN, + "ThumbnailName": ThumbnailName_PAN, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "day"} + else: + continue + # 关闭压缩文件 + tar_file.close() + # 判断字典是否为空 + if (not zy2_mux_dict) or (not zy2_pan_dict): + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return zy2_mux_dict, zy2_pan_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +def GetZY3Data(in_file, xml_path, thumbnail_path): + """ + 获取资源3(ZY-3)卫星元数据: + :param thumbnail_path: + :param xml_path: + :param in_file: + :return: 元数据字典 + """ + try: + zy3_dict = dict() + with tarfile.open(in_file, mode='r') as tar_file: + in_path, basename = os.path.split(in_file) + for member in tar_file.getnames(): + if member.endswith("thumb.jpg"): + # 解压缩略图 + tar_file.extract(member, thumbnail_path) + ThumbnailPath = thumbnail_path + "/" + member + ThumbnailName = member + if not member.endswith('.xml'): + continue + elif member.endswith('order.xml'): + continue + else: + # 解压XML文件 + tar_file.extract(member, xml_path) + xmlPath = xml_path + "/" + member + xmlFileName = member + + # 获取文件流 + meta_file = tar_file.extractfile(member) + meta_content = meta_file.read() + dom = minidom.parse(io.StringIO(meta_content.decode("utf-8"))) + + # 产品日期 + ProduceTime = dom.getElementsByTagName('ProduceTime')[0].firstChild.data + StartTime = dom.getElementsByTagName('StartTime')[0].firstChild.data + EndTime = dom.getElementsByTagName('EndTime')[0].firstChild.data + + # 其他信息 + ImageGSD = dom.getElementsByTagName('ImageGSD')[0].firstChild.data # 分辨率 + MapProjection = 
dom.getElementsByTagName('MapProjection')[0].firstChild.data # 投影坐标系 + EarthEllipsoid = dom.getElementsByTagName('EarthEllipsoid')[0].firstChild.data # 地理坐标系 + ZoneNo = dom.getElementsByTagName('ZoneNo')[0].firstChild.data # 投影分带带号 + Bands = dom.getElementsByTagName('Bands')[0].firstChild.data # 波段 + CloudPercent = dom.getElementsByTagName('CloudPercent')[0].firstChild.data # 云覆盖 + + # 中心经纬度 + TopLeftLatitude = dom.getElementsByTagName('TopLeftLatitude')[0].firstChild.data # 左上纬度 + TopLeftLongitude = dom.getElementsByTagName('TopLeftLongitude')[0].firstChild.data # 左上经度 + TopRightLatitude = dom.getElementsByTagName('TopRightLatitude')[0].firstChild.data # 右上纬度 + TopRightLongitude = dom.getElementsByTagName('TopRightLongitude')[0].firstChild.data # 右上经度 + BottomRightLatitude = dom.getElementsByTagName('BottomRightLatitude')[0].firstChild.data # 右下纬度 + BottomRightLongitude = dom.getElementsByTagName('BottomRightLongitude')[0].firstChild.data # 右下经度 + BottomLeftLatitude = dom.getElementsByTagName('BottomLeftLatitude')[0].firstChild.data # 左下纬度 + BottomLeftLongitude = dom.getElementsByTagName('BottomLeftLongitude')[0].firstChild.data # 左下经度 + + # 边界几何 + boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \ + f'{TopRightLongitude} {TopRightLatitude},' \ + f'{BottomRightLongitude} {BottomRightLatitude},' \ + f'{BottomLeftLongitude} {BottomLeftLatitude},' \ + f'{TopLeftLongitude} {TopLeftLatitude}))' + + # 构建字典 + zy3_dict = {"ProduceTime": ProduceTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": CloudPercent, + # "TopLeftLatitude": TopLeftLatitude, + # "TopLeftLongitude": TopLeftLongitude, + # "TopRightLatitude": TopRightLatitude, + # "TopRightLongitude": TopRightLongitude, + # "BottomRightLatitude": BottomRightLatitude, + # "BottomRightLongitude": BottomRightLongitude, + # "BottomLeftLatitude": BottomLeftLatitude, + # "BottomLeftLongitude": BottomLeftLongitude, + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": 
ImageGSD, + "ProjectedCoordinates": MapProjection, + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath, + "ThumbnailName": ThumbnailName, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "month"} + # 关闭压缩文件 + tar_file.close() + # 判断是否为空 + if not zy3_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + return zy3_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} + + +if __name__ == '__main__': + HJ1FilePath = r"Y:\不同传感器数据\HJ-1\HJ1A-CCD2-450-80-20090501-L20000106616.tar.gz" + JPSSFilePath = r"Y:\不同传感器数据\JPSS\VJ102IMG.A2021159.0542.002.2021159094907.nc" + ZY2FilePath = r"Y:\不同传感器数据\ZY-2\ZY02C_PMS_E115.9_N36.2_20120422_L2C0000391981.tar.gz" + ZY3FilePath = r"Y:\不同传感器数据\ZY-3\ZY3_MUX_E83.3_N43.3_20120405_L2A0000301226.tar.gz" + + S1FilePath = r'Y:\不同传感器数据\SENTINEL-1\S1A_IW_GRDH_1SDV_20210407T095634_20210407T095659_037343_046675_8E66.zip' + S2FilePath = r'Y:\不同传感器数据\SENTINEL-2\S2B_MSIL2A_20210804T024549_N0301_R132_T50SQF_20210804T053331.zip' + GF1PMSPath = r'Y:\不同传感器数据\GF-1\GF1_PMS2_E104.1_N36.6_20210308_L1A0005524847.tar.gz' + H08FilePath = r"Y:\不同传感器数据\葵花8\NC_H08_20210802_2010_R21_FLDK.06001_06001.nc" + SNPPFilePath = r"Y:\不同传感器数据\VIIRS\VNP02IMG.A2021182.0418.001.2021182100800.nc" + + GF3MDJPath = r'Y:\不同传感器数据\GF-3\GF3_MDJ_SS_024986_E120.8_N35.6_20210509_L1A_VHVV_L10005638033.tar.gz' + GF4PMIPath = r'Y:\不同传感器数据\GF-4\GF4_PMI_E119.8_N35.3_20210908_L1A0000417337.tar.gz' + S3OLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \ + r'\S3B_OL_1_EFR____20210910T022645_20210910T022945_20210911T064342_0179_056_374_2340_LN1_O_NT_002.zip' + S3SLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \ + r'\S3A_SL_1_RBT____20210916T020956_20210916T021256_20210917T120953_0179_076_217_2340_LN2_O_NT_004.zip' + # 读取 HJ-1 元数据 + hj1_dic = GetHJ1Data(HJ1FilePath) + print(hj1_dic) + # 读取 JPSS 元数据 + jpss_dic = GetJPSSData(JPSSFilePath) + print(jpss_dic) + # 读取 ZY2 元数据 + zy2_mux_dic, zy2_pan_dic = GetZY02CData(ZY2FilePath) + print(zy2_mux_dic) + print(zy2_pan_dic) 
+ # 读取 ZY3 元数据 + zy3_dic = GetZY3Data(ZY3FilePath) + print(zy3_dic) + + # 读取GF-PMS元数据 + pms_mss_dic, pms_pan_dic = GetGFPMSData(GF1PMSPath) + print(pms_mss_dic) + print(pms_pan_dic) + # 读取葵花8元数据 + h8_dic = GetH08Data(H08FilePath) + print(h8_dic) + # 读取 S2 元数据 + s2_dic = GetSentinel2Data(S2FilePath) + print(s2_dic) + # 读取 S1 元数据 + s1_dic = GetSentinel1Data(S1FilePath) + print(s1_dic) + # 读取 SNPP 元数据 + snpp_dic = GetSNPPData(SNPPFilePath) + print(snpp_dic) + + # 读取 GF3 元数据 + gf3_dic = GetGF3MDJData(GF3MDJPath) + print(gf3_dic) + # 读取 GF4 元数据 + gf4_pms_dic, gf4_irs_dic = GetGF4PMIData(GF4PMIPath) + print(gf4_pms_dic) + print(gf4_irs_dic) + + # 读取 S3 OL元数据 + s3ol_dic = GetSentinel3OLData(S3OLFilePath) + print(s3ol_dic) + # # 读取 S3 SL元数据 + # s3sl_dic = GetSentinel3SLData(S3SLFilePath) + # print(s3sl_dic) diff --git a/scan_data/__init__.py b/scan_data/__init__.py new file mode 100644 index 0000000..09796cd --- /dev/null +++ b/scan_data/__init__.py @@ -0,0 +1,5 @@ +""" +Author : XinYi Song +Time : 2021/11/19 14:05 +Desc: +""" diff --git a/scan_data/__pycache__/GetMetaInfo.cpython-39.pyc b/scan_data/__pycache__/GetMetaInfo.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5e6cf275fbae90f5f50cf9974a84e84ad1d9cf8c GIT binary patch literal 30380 zcmeHw3wRsXbtW(X$N`_=`zaEns0VG5#)G6NMRlqNCCi~GMxxkj$Vu}-d&>v61?8{|eD8{{Ur8OKI>gWQ5+ zliVu%acq{`Vd%^GJd`)aC*X@(~!Rs^Y_Vckm)`_~? 
zz~9FC=GYeG2aq4&d{b;Q^4pQ$&iTgJEy(Xceh24kW49vz4&>j#`MOvapKGV|8{q9^ zULf`xeB2Q0M%~-Nzn${3XVlR-aE0bkr&BMSi6`Q6at2BFq#T{b(UqK3aPU%gJef$T z!^n9K%tR++(=9cGSr1#01vlX#uN z9JMR*lUGkKm{5}Q zu|%h+7oLcwqN=LsuGHC-E+kXBdp4?`&&XV0o9l8Li((Zo#js63N8ni`KLB63uX9^D^P z560B!fy6|zXZEZv;v5m`m%2(7fH(?@gfgd6iPXI3wvoe!?vzBLk)Z>-U;oEvfAUuc zkB_|b^|J2gKl!6y-t(K|^sBc16(b?y%lG&%kWd{{PK~G_J0BC$F3mCJl7$9G+O4`Z z_j%`D$9*13Mlnz1#7kx>m6F5 zR#5M#c4%G=xh6-QBVB~rMfen)bnK*ZN*A7VBVDAq^3T81ajIQ&VGelap2sEe177)^ zM=!nd;_{QPEuX*mqgOtG#Osf~KiGfi)yFQs`iBEJaIGD)Q6)O#KORlUsU7~yuYGv= z!e>?%FRU!SvhvX{tvvRez0!2@bWDlN#+30`LfzqCdFp+aUVZ%akH5D3$ipjN{D{5g ziTLD+ysB3||FOKP3QtF4Hq!4wR?nhuC&qu}mmtHP1-g)!m5SzLN-`Chj-QN4p1F8J z9nd|g6Vch2?opzN$(U4RjtDcWvsm}=rAVbYgO$p1M}%MGoT@t$x-+gjqq_6B?i|;h zvhGx~(^aX!AapOCG1#vr2U?L$)q^-hpR>qW;&kKFf=>_9CC+-MvJqwDkQF5TF`_=%(f*bW}C3+(p*b;i)aqav_RTTWttmh9?gR?0l#{haqb4kd9>h~ zF7WXla5_%;)dI~a3$jagKjymFkuK1LDa<9=Bh%L<7d{5Lbhkqm-43_oJY-kG7j&dP z5O6s8b1$e?uqZC!D2tds-np|FCO^vMN1j}M`K2?{Gb>NL|I*7}S^nBrmLL8&X2XFPt}~&84p|tvvWednI(l?}Q!xOqWKneHuvRBT7=98|R<@ zUUaEDPha)SsT17;m?2^;F`ks;iOH)@ukI}Bbn6B2gp9c;xf8L|(~>tTqm&rRW?aQ^ zr&pn;A&GmF<8w1quXG%*C^a@X9ph|qbapnDkoTO3Ps>u7Q8pGkqYfwK7~Xhz?YWbv zAc?1yxEeb%Gp&2Zr<18zXQ3`y7cGg_mFc2vo=0~p$)u{gPR7nk-UK>}@VnAEv1)c1 z=e+0OP#*@Gss?e0qEO^65CKi3L1rwt=YK$NGRz&v z669&xh4+_h%B=yz$iid7g8Sm*c#F9nb-dp(;lz8(^N@)7 z<2TJ7!7+Oj%pSo(k9apy?R3#~dMxK2eM?T@k@J%&D_+YM4`R;h`uS$&?c4Qy zy}%>c=j*@L^MxGByde^mLABCQoq|GSzW<(Ke&=``Lic%!sx{t3Kk zMj^}8ss+#4E~g{i=5)+&R;$$-t?j&%(YV)08?6c)_u{PSwsUQx<}u(1AO%@>%iN=Q zd$JHR;4&Zzpp1Xy;OO#WpI`adr!vyaF2~dpb2G;i(fD)(fYbmlCWeB1V3eA}j3*4K zmm}-4a?&PDb*J(ymJBlx7oF~_Wd}z`LlHh%&(!SX)v}RY!}h#L<#P!zZ3{Gf2kJ!*h+oEqX)PMwacC!Q5_p(mib z39Ew-h_A*HsW=uchWuh+RJ3zCo>Iq?GqbnfkM4hluVda1fDSYBpz{Rq?8K!rZdGaRpr@m9u+ev+9W}GzFFIAV2Q_S$TeG<*7$k zUS7KVl`pS+atVm?^-nxxfJNg+fXCfgtB`s|p*0#n{_J#I)eG@3X(B4O5>uRvsf49c z$~!1qh^eVW$5p-9Oi)druUJ%O=%;(Iyh+7$A)eGl+y`LC1BdhiImVN!8l9O?Ftz*a zSagyvqi77$`f!{OWDU=lXvxlaLa)=1cBjw#h4igzAGDhSsB1eeaP2?UT$3Udw$%)8J 
zDix9A<0@xnX+4jO?u;2Pj(1XBetVSIUleqL6m)68zHX<_S?P2;Yh7hXVVxy3+EPBC zVV_gP(qBZ4O4O|MFg4PYH$G@1{L)QdLHGrKUBdr!)7LHhBYnj}!D;)rY-y+P6S1Pv zWu6N?WzOZQr1SjB&$UAkc?DU7Yb$Xo7jPePu)ZIR4A$R?ci@6+p0yV1VtwJ_SKb8H zw~+Z8#QH$y&)d*`E;7$U`zA667s#zxD`sR~l0oJrK<4EpGB1^V=`vvVa>DAg4qe@ODUK?eW%hQ>atovVR;>nj-mekbZ{<~h zUn7LysuA+{YlQq;c}3v2)o%ml-=LKf>c@Iv12BK3R;86O<_9h$jWRHQ%S~ZIA@#i+ zOxUs@q<#SuxG94ITTBF4x!^k64+Pi>1c-I0TBWs~cjlnJ)^n|l`f$vkzRhz_;`g)) z^@W!&e)ZtqJAd@bBm4Jthy9#<^oC#{z1edhpCtxzm^Y3Cl`zJq^n!E>SBrYG5Rg?4 zZyM++JE;4eMEG^g>REaxE4x5`L5bZU^KJZw4&z3whvf_q_As^BOJtD9J|g=;hLt^( zd_NJ!XodUtg(K)rDw{QQJo_PpfY+qrI27TK%nZ~#DFSc03s_XD7*0Z$9-TfMOC_i0 zNc2i2CVDvtkTN~*3U-C0Le;=-x;Sm2H}ABCo+J^a49BFRCzGog$(TNfA+r%n^Bj$Y~;sN&E^a@sxQGy(|kwXrd~=MI{=MG?8;e z)*ADJRPftG&J*Eldl#j$NHcro3>e-`rFmE(ix9G?;TY;k;^ah_v4QdsHRDN`MF*uh zbML39k@`ar8$196u;2k5v!K8Y0MP7gc2x?15fIhFFGZwoklrk8utNx^@H5f3!&Lwg zUV!l^k7K+!Vw^y5MvR~HK#Y6l`>&(2({EUeKMVyIz4~D`#M22Qz_S`NulSHNquK=tL6GMsqlOZ3M?+E@O(0fSc^j{JfBQZ zz~Yb!&nJ_Rw>YH2^T{N>Ee@&hd@?EQEDovgd@?EQEDou7e6ruhfiMQA&BkeAPP>iM zDsN;#-oe6t6ASnb7Vw)iQtfrH;NPURa=Ha{ldb+{h=k8X%gJ(y1o>@@*e8-$y;N?AcaLnh9ROY!O{gyrVTYmlAF&_vN zuJ_!FV6x0U_x5W&cc}j|?-y48c~S*AP|vei#p=Jd1&`99wJ&&50nDOXjkgoje{EWO zM*UX}_1_tQzm2T^t5It;xD~MaugFLntyuk6n%;PBBY+?u83>j_{ns`3bG(sP0l{7X z!I6Up3?SGGWgwDk0zqiGP-*$%7cc+8gUc5l{r>wNy!`bKzW&nF%b$M2-*@T7&#ip< z>6PbR@b?8~&;d#h9H#o0Ui<Xnfj28DGYYY{aWMId-8^Dfw5yNNJ;=WNFN!l(0OrD_>Bg&2}qEsFu z?iw)CrmGqO(xm5QV;PF692VEY<>l9+E6#%FJRq8fK^d3k0nrf};96)ji$Sxv^cX5j zC6g1qk=Yqjb;UX80I!Ent;sur`Ijo8N$Ab0ubMOXw3(Gwq{#9c>Z=@;67Z@(n01v} z2?+P5R9N{~wES0ozJeSj*{bvHaESsrLYQi-58-~~;NQ())Zj}~J0-e+e}(x!S%rVe zgzX~t2ie&GmpjON57&WP1Y92Agrj<<;6?-22KQsa#UYrY3uO;#qIMb>gfK9ty+{`# zEz05ra^@+Ly^C(bBoD!XhD_e1a9ZUU@q}F-!E98b7A}h9aJK~WGSXf$uCoc5F(wo% z#lV{7TvAS^H#+x{bFlVk`$kJ8m@#bTuf6bnRJH3dgNU?)1(G&oMW7qA!sf~{4nf!55@rc8M? 
zng8n&e$=Q#jkmBK4lfOaUkZzG%(yci9acooT zb8BMk?NCgSs>;tu)~gX>^)r(7YJN@=lJ#osoNnWEwmzw_HfoJ>JAJf`Y|5lmf#%T~ zw0cv8)uDAj4fUE_#b#`Q-5WYgHBCPpHSYP|-hMMff3MjD21v|uD6rZ=72 z1SG;oMk3^Nvt^F7BsL^+`GYT9{=gp%{L`qP=tah2H>-#;#Y%+KHEbp{QibGd6^YBk ztTG8}Be3zF5@gtMJDu=^G(?P`{05zP0})mSFsVRR z_wJmGC1Pi0mD>dxJoAtd#h{d;!}MfM#S8XVqr za4>RUG;-J2z5(5J@8IZ_LY(AE0THaq9nZQ9%@xBBz8QnyW_efO>Eh5`Bg6X{WauTZ z5yc{gBaf_C&B3KeiA+Pw6v^nLs_jfmHFf2W>5SDN@o&Hms$?<6Q92)KpkShjo>d~0 zCG`>O)jFH%RXfXEWP>UJcnLv$M#5Z#b*h#>q+~0B za;-?Hg@vvKu%+2q<}9&nXD+mM7P&;XNa{G$Fh139637(zER`B;V_(Neb3g?t)iR($ z#}5;r;zMr)RKW8Mkb=cL`M!~u2ecs1NP-n?m-4Dq;w52j3UmSiMJCq-ArSt=EjGC( z95J~iDwI-&zYL4{QWf545Mia@!?p$AI{`MXvaEoIhcN(*>@~%FnF{re27fyEsg+Ju z$VCwIbSyT-ydRLor&dBeW0pe9o8>I#DOz8P^1>AN)q?*gC(1t z7u-7L0=i_lxW05fY68Zj8*r?}vC+&o;aG=!GmiB*ZZOMR%zUd^?w1=_xmCqVtVV_k zRSXpx86LoD1W3@xu%U`!L!+&J6W~D`tGH_AW+)))wK};ODS`)8S|#nJfV!(4$_gR% z2&`1tQ$fNW9{P0}_C4SZw3{%YenB|<*MJFz!h){VQdoFRg$1`XVS=Hs5J_>7feCGM z-^OgW!-NMvvb^xf%I`mT`PF9(tnm7mUw-}NPeILa=g8=2caSs}mPamJz=sd*+SMHj zY`qgwLiLX%r=yC06vAs7eikwR=-HGSoAIY+qvNr0B|4#Y+LaM&8nrSn_&4MCWSEg? 
zov-{Jk&h9fHT)GXrJe#AHf&3KD9>W~la%^>A`2jTnc@CJ79uhtWo;C?IE42jtv%Vm zqkG`q0|$}1;iRIT*iAk%usdb_VD#F%V<~kl{$O-Xe%&bi$k*t|5axO2q)*d$^MrXr zo-kKFLzU=_%%WR)mQtT1@_8a;1yR00ghaCP91#-8$`^@{C|15igoLp2M?{_{@?|0+ zBD9)UcwK)Qsm^Px>$B4LkEzWIM7~1gt00n?h$}2JB`=f>MpAi+ieD!33XxZdyhdb# z2rudX6{Ye_NThzzt4yQdT?C)VMh90cy?PZBPcxUxCOoQJW3Ov)sj}7Y5ba&a?>Eb| z_siT>{o1&wY)D)0)U4_6Vl!i91uX$A`XLgiIs)00q8R{53pD^_C1mLvkgkxdq6_la zg`@p*=iq(dABIms5|U$KxtwhJOTJ3W=DK zIuW0RxE&oH8Le5w;=KOsR z#oQX}L0Si*AH!|IiN9XcTlCH_{P{XWOo5!Pi zbr~L3eP+G-JRa3+$ndZ#H0w3y@u*&tT*tnR^;$L7c@6B_SkFF;4ZI?(XY+OguL$dT z<=0@V--z{Jvw4r2=pHFb=2>sRv-ajZYdCP^J!{(KBBY+Nn-xwe^fA3Ao3&i+ zWYu&V*68Fd$t}z^r{OIrrY&ztQcZWvk*bTunIHd*eKN}*erEZ?3&FM2(^Sda4zrfr zvlZvcK3hSK{A0ekLv+nuGgKj~s5aJ;3m7tMu;<_LozVX5-eTolXWv$Pjonc-Gaxyhrq z=@2>Y7uy(8adbXDgH<7|L}^QE>JDg9vHtY3X_mQS3SNnnn!k0e0nRG12ja2Qx6MRn zyOYqHPDju7oWy{N$Fn;&t_nv}^TN@ttX|r%duO%C85uo)%e78&YA!y0vU@t2JlQjg zbq%SV=fk^ZXzv9!!bFbT(RF~f3#4!lC}S%*VlO$O3&8+;Oxm}k(54IHN{#&l$LPtd zg$A;#%{(+PNG-CEz_vkz9Vqp}`S>hv`Y4p*v#j#|47Kx}adUIG*14 z3B#xIRx0-Ju3nperAjU5h_FNDT840&Yh|7xTo*AC-ezH2?q)7aFPMqV9FHlyj>gU^ zs10o1m06?qf2OnlCmLNLd{`r}Al#fKyNG{v#>%{KG`Kz@@DU;)2i!z#I!F_F3xsKk zkc;JIN$Ons;v>*@_pP8Vku+Ejtf7?!6c;xlmlN+^-5b3fNr2p1KU z@EN<%sco@pPdmZ?>E?1;up0iBShRi*W6uE#r0mFeT;A^nSa8p;0Tvc{lQcF?zkaE{cs$Ec=!&DeUwaW_Q zvPGzHYMGty>b~8#x<-k=2BJ%b9qK# zTn2@44J(Y-szD0l+8l*(9TczStT6T(3S)>g5g?{srfu6UMbEo;6&VGx7hsGb1w>ne z(F*sdT5Ti?I76x$SfQK9&dp-B(DGJgZ!%}z#sSY_W2TQZ$f^vyqOj4TUgP)mF@6bG_u2Q)jB}8Xd5}bnbVs% zO?G>uekTCf7OhEZ#`cnWZHs&htn!WWt-QsA6!2YgH^9658^H;*AS#21}g`mO9i;8UhM2SSmHrMhgZ@h3U?7ov`EckrniX084>< zJH7#wLTdq~Yk;I*&30l2MV;>XlF05rDAFI8!8YO$=fXL;UMqLC+JHv~$M#~M;enyh zuD943@OL0_3}b$WO&Enp7_GH+IBO>uW~g(u^tK05aR}YpHl^_2FbvSTkQE<>#UN`Z z_&yadF#Ic|<{J~i9q%@fUE~0)0OL{YTTLN3k(3&EYj9)_L9gg!EV3(=iltH%Fl4?c z$o>Rf{{Fyx1-hW^wJ~ff9XDDwWlaf@!?B5&f`BElh+$ZfiuMm4ij3{rJv2BvU(Riz zKZ;HpEoom1#XT`{<@*PR2M_NWJ8)nJAK^`^l5hWX@;J_t<&+HGGZq;+G>TYBqhrcn<8EAmXmlt~68Qv?j}uu7NH$`> 
zO;@<9x2vzKziV69K-cyz+>l_fD;Ub|m{Gn%&9jhgbH0h=yHx&PK=cynz({28;J%?< zV}pAYomlP^Of&gb5H^62dQLAeBDTB%^T#{rtVtpS{*?bp1ey?sGD{>6`uz>@*1~_c ztnaSyx76kwku2&HmC570w^z9x1$uF6?l@yJxLD|3${0QqH=rhB`^(=^@4rptJP4xM z&>nUxAd}(H$_lnTJo(?L#k+|dCNfH7jL2O?ju5$<$UQ`kfk=gjJwQ8mZDx&kQeg#% zZikNiYke<%$f;T<%6R{7H7x(Nuu=d#Rv8YFw7<0(oNIXx_(&6lPl@vu*fMNDlOd&h zHSh!gH2^2=^939_OfLe!l7!`B0*e~&wp15CGq;lmgA z&5=?;v7@((VNp)>o*ZB*T>S2}fGM{wTmz7b*94^AML0(00a9{lBtQ!N6$4V0wNmQ= zQg|XTASFk|8v&%`sAvJwDie@aWdLax1JY^+q+H5?l*@AfX*C1VwQ3L`B|w|?U?5+E zwE$@y15!cxao)Yl0#eZiNb56TvRZA(0i=!V0#e4HPHkEQ_yZ`dm#fna z_%>?wiyL_B5rEoM%T#N+i8&@5Wll45Ojybs03)=r;3>6gVU7t;xlZe9|(#*8gR4A&avYPnHv0${7nz%^*Q0M|CJ0oU@kg7_J( zZH8{^7QnR@xz&VgXv=V|4Z5uabX!{(uC=QhHN<-~;Torn7A9QVa&8O5H5@Z=%|AzW zaht`*fNNp@p`ksr|6}EauU`51!j{;}(B*vN3w~8}h=UnxjnULpZqKWtfd>!6mh*t?@VL-gi zHi#EKG<2gNPq~Cky$-bdA5#D-j!R0-KpqAY{%L8JjR*%6L>_yE|3tBFp*v&eMI_+Y$Gy2 zWIG7p6bQ8lQi~w92vUn6wFpv+Ahifmiy*ZKQHxM;zA&^c&=uO=%R0Dx4A!wO22)n( z?h*7_7j!9?sg$fPH|KD2Q;2IVu(kE3z?O1_Mo2hI`2mq161gd4b(~sc;FV#k93fsF zp1MIO1r&9SJrMVo84761yLW<-oYA7?(;;9~PQGgabW(y}Nnh^KTYJrWwwibw# zN|FCeMH@x^%Z)9F#*eI<23mpvdzqhG(uK4i(^C@A?usPG2)Am70%)qQANlK zXK{?EB4mxTI8}TuvcXv#BgP1nV;PPSS%fTZRvjar2*o+EI7So^(|3}`Y{U*Bg_c#v zh!{d!nJtbHCxq~X#krP!jADOSHZh6@XxYjrR)}FwGwfqMr|`kn4nzWhyl;kQB%%N! 
zMX~=V4u~Q3H$dvQbIm@&b2`5^O2=Ka-HpSAHm$UN&VL!|Kjq~=Y!bx33-0Uorc%sjl_R@ z>vm<%d&lm$>)`I5J4f~#J0AC7$n)+V#0fRyote&V5Ti!KT-|2|(a1k^a>6bcNI&@jr}sLR0`x`FKazq8B=$Hp<(Y~FtSEIH@R}cQD08KMic$)Z6p_4z|5{?G zHD3&+so`%C(TMyvB5RHQK`O|~lOIv`$3%WYF*T0$keD$lOekD;<8o|w9Z zZHFEF?_{fvLL%h@XhI^T5{q1j1m2}bp|!}-zsyBTZ8Mk_Ma>Xa?L@q;Yhh7A#!h1w zDwZ($;pRF~u-R#Tj?v@@lTBb`gvpOFV%TY(Q61-DWjT+{HfaiF>ztxM$*`Fqw5QHL;sUGdt@yv9o?N+ZLMGw$RMBg(kKwz_B7ze*^FFqtH~@Yr3dZULoXYIGwq2%Y(tq%CF8PQASDwqY|%JGO&K(- zB^}lwp~%eK z8P>lWIV+B)4RbIAA>d&Pp1|=G@BnNqj=BJr@?N@t_Yuj0t2}s;UF065di0=_|4HP3 z5&7RlZvKHBqh=2g@nuK_QaS|ymF^9M_K+s%FA1d-nFa)k>{Xe;DFr4@aU&Ey;T3l- zUZKbd#wUMydtKC$QQ@zNS;SfcAH8kotLs3>e_Q;7tGG)!D5)Kc3`+VtW6Aw7LP%I< z107}1kZ^It2>nlgsKmlcntRbeN5Xv_jFk*@bPRlo++;^bg+Mdl0U;FvLjfD5W>57l zI1_))d0KqGkMl2@ZS6>i+TzcYz^}y(Bm}&}POyZ8Xp5tPglMU4At7>e%|$}x%&{XO zY#Nt?Wpsi8*#8gct{?V@E<8m}5smEzGeap;jOvT8hIk0NTuWsEOTH;UU3%!adE5 zhng5CHQVYF67n0ar-<}G(Yt_(fP{FNj_6&6>#6HhFXUrej(ikSN{)PVXXK-hdV*X} z5$aQ*MY`rPm+IBnlA%K?wq>MZn<*9BAr+A}TCh}XGXu2qk)TPlVPsffcr z2dQZ6a;F&W#!`I&Kht?HB5TK#>r28n|AJjt;@yvNu*6#}=#>9MWm&E19&tCR=Q>IvLJT2mK5`eawtZYW$Kv77}kb-#VGYHRNL z`u}2NI{pIJOcbuvkvY0g_wG1K@%0ye_tGniD-V5Y`Qy(Rk=Kusz6&I{t9#F$y`k>j zzCiat0DFW3+XH=pVD}Kk&Ikkp{r$oI9t39XnVg?HaIOAmyW`O#cLoEY0|)o_>F^obh<)k}VAk~p2Qc-5#?%|YlL!n10@U5{YjsCapmEt8u{bh18o>?SKG2Wr4+VM# zaQ%Hf=Jodlw$t^)H~z>GBB-G#Amy_R~%?HN<{k`Gd zP$UrChWXjo9|>>6ybbpTB16N$$f3yaScHDgG)-@OD7zw~G?c@;$81CC$57~ZVGV`y z!B958Z49@3dmnxtJd|yy7~EzJC1eeymxj_QC{?&GN;OF5J0S~<$y1c_5~&2ipPxuP z5Lci*-$<+wkqbl~C-Na8UnlY{BHt$RJtBWku+72%j+ZM4##$3Zng{Ih-iYeZn`R5{xj8&!J1P8B? 
z3R3KZh9m2`CDGdBC>5H~qv@|qWCHgY88dDlDu;wYbnJFo zbEfD?k-Vl4xvJ%CXmp7sK37q#xIuicqfNY@>4Z3guq^i=EsE3PQ5-jmQ8u-bxfOwN d%#V>0+_Xg!VRUGHXQqG;ES~Mi^|%D7{|_wA-46f& literal 0 HcmV?d00001 diff --git a/scan_data/__pycache__/__init__.cpython-39.pyc b/scan_data/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0af127ebbedc22ffdec62a4930b20c9d84ee8a3a GIT binary patch literal 209 zcmYe~<>g`kg5!466PL2?cSH|_&afu%ZEx1h?N8gmWiOQ?N}*}V{)m|tcd83VGpN*3M!l z2c$fM4GXYxDrd}?W2>Kk^q;?I&m`C% zrEnLP6iLEU`{}jE4?e5jd{kY!{^Y?2K&)PS>*(>v4_DS6{=T988&wN zlWUKDUS0lmt-M?-KdAlU54DxwH!4}qf=MRxCdrsK?N8Qj{rK_2w^!ePRK5CU?UQ#J zHD|Lkv)feHKYnkUDlut+*!}*z6w(z;i(ngp|Le~HjHN=JXy^6FqDdTg(#p=8x>Cs6 zw9ixAStD%Q;*a=%35YB>M8zG^!OHM^~6?>Jjp3g57{2dddTq5l!r1N!XBa@ z38W2v{OPfw{)uU@II-Z6`H6gy&N_CnVAwfh0_WTbH)Gh7*r3LQdC|z_EweXY^fd0> z(c{$V;{oP&0T9$?6hSd0!@CFGUZ7*B3y}_xWkjw~f5Lz_5SbMO zktvoXCRN}zF#)3TNJ-{0CWB023dqE=OnG$5-NF*g@HHjiE1Y^GNka@_5ldKJk**&s zg_t-8afcPmKM99d;69!guqq3(umtziZaylwKRF^G|GfZ^h01CLFjgVfv_cWw^pCH4 z^=9?{pD$Xu+PiN(zJIU!r+d{m--k%5{_dC6<F)5e3dSqtll zKz7c&NXHy(dXh_tCp(~^s|zGc&5Jq9Q!!?B^9s+So0;CZv*U9P&oqJNS&*I<>%aJog3Q1jnQa1201RuVlxoyr8teV z_!7s_a)Lp10ACU;fmN(R{@sLFTb6hpE+uGFxtS$kW)q9gwcrTkW{#uX!n}L~ktB|l zn&}SKydmASDQ|;YQ);ofMhj@Pe2GRgYbv+G{FZ6azhO<~Hn>G`zAu-;m*STamzuWi zW&(C|M}0RD+W{+RrR_{*txGV6skjOJZUfv582Gpaa0l%GY6ri`aQ|E3Tr}Wz`rNh# zF08NA1)A-Ey8(9q-dP{-0^AAX9>86Ich}{6>f^n2c@lTCoi_#C%DMsWWIaB;%cqk* zz1OGr`1Ed{ey+dkc6(_bYh&%K1NX3ftc!Kx9-u-@VXbTj-i`O%N|g4q{WnDSDpd8o zIC)DvFQf(9$@VYtY_^||WPx_w5R3mnLg@e!7GI)#tp}D6*T7mp2Qx#$1;{|91D6h@ z>-dOpB*;no3V(x0_mjoi+kdQHe|PQjJJlOkYrnd@_V72=m5*!h-C6zg{_6cZPaa%- zbv&IueDukKYnzz}%H-IYfq}!vjvRW`fn1&(cPxV>(~t%%3vP8XU396LOS*X@V`hjk zO;cOaR?eEt8+0}(Y4yrVXA8M0+sM8^l(mB_wPYz2m&;(iicEjTiDe(@>dTU}#YrKQnq^Ay7sXV{6FdnU z#Z*F%22PvYJMvY&wyy#3VuO}bz~e!?;x$ci=L}LDF`ZnR62conPxX7ok@S$>21e%0 z3^nm{?_Do3=vWqTVLt|WqK}{3IL>i6=!PNR4=)T`=<}mMoP+}Aa}z^Z0wx?%95VB8 zf;=DB?pA8IuGQ{W*6w{;yIp~rx%$DCpd1Fr-rL)o5JCo}6M91}H9$(P^ z~Uz;geI1fKK`5E 
z<{-tvW_Xcra6gn+c;l?M=7Uz`HRut@s5wnX4XU?&feH6!EB7q_39j4Ts2gy(@`l|} z-g$4g`<&Tm*XMr0ZUYyT$uaJ`HkWtZNu13@ZV;jW2gs3#17u)iZaj%6q7{czY@$q5lKbxN-!g!_$YX#1QMUg{1e6h z`JW{ISLUBAe$GENT(_jvabP!q{|WaB2fU55T|20zn8u6XEA8; z6vv%0o#3`eq7AF?re!~g&Q literal 0 HcmV?d00001 diff --git a/scan_data/example.py b/scan_data/example.py new file mode 100644 index 0000000..101bb41 --- /dev/null +++ b/scan_data/example.py @@ -0,0 +1,165 @@ +from xml.dom import minidom +from osgeo import gdal +from PIL import Image +import numpy as np +import os + + +def uint16to8(bands, lower_percent=0.001, higher_percent=99.999): + """ + 拉伸图像:图片16位转8位 + :param bands: 输入栅格数据 + :param lower_percent: 最低百分比 + :param higher_percent: 最高百分比 + :return: + """ + out = np.zeros_like(bands, dtype=np.uint8) + n = bands.shape[0] + for i in range(n): + a = 0 # np.min(band) + b = 255 # np.max(band) + c = np.percentile(bands[i, :, :], lower_percent) + d = np.percentile(bands[i, :, :], higher_percent) + t = a + (bands[i, :, :] - c) * (b - a) / (d - c) + t[t < a] = a + t[t > b] = b + out[i, :, :] = t + return out + + +def createXML(metadata, xlm_file): + """ + 创建xlm文件并写入字典 + :param metadata: 元数据信息 + :param xlm_file: xlm文件 + :return: + """ + # 创建一个空的文档 + document = minidom.Document() # 创建DOM文档对象 + # 创建一个根节点对象 + root = document.createElement('ProductMetaData') + # 设置根节点的属性 + # root.setAttribute('', '') + # 将根节点添加到文档对象中 + document.appendChild(root) + # 字典转xml + for key in metadata: + # 创建父节点 + node_name = document.createElement(key) + # 给父节点设置文本 + node_name.appendChild(document.createTextNode(str(metadata[key]))) + # 将各父节点添加到根节点 + root.appendChild(node_name) + # 写入xlm文档 + with open(xlm_file, 'w', encoding='utf-8') as f: + document.writexml(f, indent='\t', newl='\n', addindent='\t', encoding='utf-8') + f.close() + + +def GetJPSSData(in_file, xml_path, thumbnail_path): + """ + 获取联合极轨卫星系统(JPSS-1)元数据:NOAA-20(Joint Polar Satellite System spacecraft) + :param xml_path: + :param thumbnail_path: + :param in_file: + :return: 元数据字典 + """ + 
try: + # 生成缩略图 + in_path, basename = os.path.split(in_file) + ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg" + ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName) + + gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES") + in_datasets = gdal.Open(in_file) + meta_data = in_datasets.GetMetadata() + # 取出子数据集 + datasets = in_datasets.GetSubDatasets() + red_data = gdal.Open(datasets[0][0]).ReadAsArray() + nir_data = gdal.Open(datasets[3][0]).ReadAsArray() + swir_data = gdal.Open(datasets[9][0]).ReadAsArray() + img_data = np.array([red_data, nir_data, swir_data]) + img_data = uint16to8(img_data) + # Array转Image + img_data2 = np.transpose(img_data, (1, 2, 0)) + img_data2 = img_data2[:, :, ::-1] + img = Image.fromarray(img_data2) + # 压缩图片大小 + if img_data.shape[1] > img_data.shape[2]: + width = 512 + height = int(width / img_data.shape[1] * img_data.shape[2]) + else: + height = 512 + width = int(height / img_data.shape[1] * img_data.shape[2]) + img.thumbnail((width, height)) + img.save(ThumbnailPath, "PNG") + + # 释放内存 + del in_datasets + del img_data + del img_data2 + del img + + # 生成XML文件 + xmlFileName = os.path.splitext(basename)[0] + ".xml" + xmlPath = os.path.join(xml_path, xmlFileName) + createXML(meta_data, xmlPath) + + # 产品日期 + ProductionTime = meta_data['ProductionTime'] + StartTime = meta_data['StartTime'] + EndTime = meta_data['EndTime'] + + # 其他信息 + ImageGSD = str(meta_data['LongName']).split(" ")[-1] + Bands = str(meta_data['title']).split(" ")[1] + + # 中心经纬度 + productUpperLeftLat = meta_data['NorthBoundingCoordinate'] # 左上纬度 + productUpperLeftLong = meta_data['WestBoundingCoordinate'] # 左上经度 + productUpperRightLat = meta_data['NorthBoundingCoordinate'] # 右上纬度 + productUpperRightLong = meta_data['EastBoundingCoordinate'] # 右上经度 + productLowerLeftLat = meta_data['SouthBoundingCoordinate'] # 左下纬度 + productLowerLeftLong = meta_data['WestBoundingCoordinate'] # 左下经度 + productLowerRightLat = meta_data['SouthBoundingCoordinate'] # 右下纬度 + 
productLowerRightLong = meta_data['EastBoundingCoordinate'] # 右下纬度 + + # 边界几何 + boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \ + f'{productUpperRightLong} {productUpperRightLat},' \ + f'{productLowerRightLong} {productLowerRightLat},' \ + f'{productLowerLeftLong} {productLowerLeftLat},' \ + f'{productUpperLeftLong} {productUpperLeftLat}))' + + # 构建字典 + jpss_dict = {"ProduceTime": ProductionTime, + "StartTime": StartTime, + "EndTime": EndTime, + "CloudPercent": "", + # "TopLeftLatitude": productUpperLeftLat, + # "TopLeftLongitude": productUpperLeftLong, + # "TopRightLatitude": productUpperRightLat, + # "TopRightLongitude": productUpperRightLong, + # "BottomLeftLatitude": productLowerLeftLat, + # "BottomLeftLongitude": productLowerLeftLong, + # "BottomRightLatitude": productLowerRightLat, + # "BottomRightLongitude": productLowerRightLong, + "boundaryGeomStr": boundaryGeomStr, + "bands": Bands, + "ImageGSD": ImageGSD, + "ProjectedCoordinates": "", + "CollectionCode": "", + "ThumbnailPath": ThumbnailPath, + "ThumbnailName": ThumbnailName, + "xmlPath": xmlPath, + "xmlFileName": xmlFileName, + "DirectoryDepth": "day"} + + # 判断字典是否为空 + if not jpss_dict: + return {"code": -1, "msg": "没有满足条件的数据字典..."} + print(jpss_dict) + return jpss_dict + except Exception as e: + print(str(e)) + return {"code": -1, "msg": str(e)} \ No newline at end of file diff --git a/util/__init__.py b/util/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/util/__pycache__/__init__.cpython-39.pyc b/util/__pycache__/__init__.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a60cc7a58ab510de03a6194e74876a052f883012 GIT binary patch literal 137 zcmYe~<>g`kf)51;53ed}dx|NqoFsLFFwDo80`A(wtN~kU5`$ Gm;nIWS|CpV literal 0 HcmV?d00001 diff --git a/util/__pycache__/file_store_path.cpython-39.pyc b/util/__pycache__/file_store_path.cpython-39.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..c53f3ad0e22e98b91fb0d10817a1f9af56583890 GIT binary patch literal 1620 zcmb7^&1=*^6u@UP$!<2=t<~0wdI`cFiniU>&j#@Wd+0?kRnV{y!%noRn`D_x&@J`U z9txhNpd!1VLP5okB8vEr<69+ z%`zuTlM~|;6XVCsDQohiG3~__%h&?v(4`6%-H%3~hk(!pc;gum9C5q|?hm0QyoB#! zt~AIZgmDlx@m+yNF+_u9kuV&?ILg_a!nU%|lzeQZ3bRU?-&;rf;o3TPvGO8k(g z8jF&UrmFZsnyW??w<=X7ic_`b^0`!p2g`Ll@Cp~SNJ@MY`M{npB>)`#_O z!fi^@tu{ZaO`f{k1^1XEBDc)ei`C8N&%V5W(}uqj`}XeQwb5cQTBK%i#wuR4in9{^ zT80U)Qypy8ge@j=hvkG21;Rw&Ys4=6_Cq`0;f$v>fz_qJ(ul;nOGDS z%JyM7+JP_&lM>+(Q2vfk!ju`?Bmh%QuE3VxVQxHH`G1%@;j!IdS>j^i$k`qthJ@&e zdT(#kK@{>i6IOP@9){75uzwZo37GxYuq6OlsIok@B!ta909K#=KY-qB+I+D5r)usc zBdXxfcm{j%P`7>X4dVCv`7#sqHm(`@L%edX99*=6CdK`|@xH`QtkE}pIfl}9UcSw9^_ bG^u)L#5ENQgQ})uc!+N3dHE0(_Z|2SYm%O) literal 0 HcmV?d00001 diff --git a/util/__pycache__/http_file_upload.cpython-39.pyc b/util/__pycache__/http_file_upload.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6b6fa5abaa9cb33500c4052d83b8c4dae0db08ce GIT binary patch literal 1953 zcmZuxOOG2x5bo}Icsy%6k988nOFrx(Swib<2mvgF2y7xnLMucfg^X63jNQHK*`0^e z(oEKY?G+SC&XQapy!;JC7x#M^jzhQ`22vRecrJ>pp?; z$MuKN9JD`iadbGa_#EEiE(lHn!YOA7OQ2KRXN9u1CjpD*1b!3kdDb(l|b>ndp<+rQvn z9H~JrTdi*E+c>)sx31>dj(;sqMGHo^b?)rB_0IZw=Y!Vz`R=(-{0&j`yZ%$~p0q8! 
zyd5VZEL1KUFywNa|Cs?xuOB2AgfFt(1Sbzp~Nophf3)F84aaF*uct zM^TnW9iA4Q(J;v){*=PCPdSJ_Ll2$}45|h1m+%(nK`2s^DFsQV^er-Bik6JiChSct z&PwYvnb?Z$S~I(}=M(}?AQ;xBtiZqfZhe)vR#)4OcDCVCrL8yvQH%@i7K-O1rJdU{ zRst-nY?Q($5Vnkz2t1S_ZrVzu!(WIz0Z0CGVWYc+;IsboLkp!kc^!rrTgMPS2gjLDWl}pp*qE@Q#`jaJ8de!e>U5tQ(DT$Ch@Z~ zsh4%AirS{cB9l!J+BO_^3}>~4EN8cSF;?B>(J3%^^3#upzyEsp+k?Z$vnTft{(A86 z@5kS3mhS;cJgn+XD3(QiRZWo>@jld(QL%9VU&cexJ_ElL+Ab8(u2QJbSlPzF>qUjY zk~6g<@lr4pVozcjX?u{vrxjFPaVIWR0R3~le`aVF2;x`t~jfm*L# zy3-d!73Z1ZmUe~`C@q;2%ydQ=^6Z_TLB8x9NzX`8N#Uk3scljO&T>+$nB<`8)8}PaUSux$i3l}Xf47|%% zzPc1{T=^#OzPNbRXtIN6ypIA~-Xibl*pZ-MdzFiJ^J$v%Q6fIWqAjp!ZKvrVoEodK LCT-Y$#q<6J5smZA literal 0 HcmV?d00001 diff --git a/util/__pycache__/http_util.cpython-39.pyc b/util/__pycache__/http_util.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d5727802ae0dac092610b09aa8731f2fdf931124 GIT binary patch literal 3421 zcmai0&u<&Y72cWs!6ijXlq@@ro3u{a0%c)|Qkt}~Py`8*Hm4dWlC~%zbg|~Ft)w{H^cY6 z_q{h0HX1bx&%e`8!(U#ptS{(e@p19-2HtD}AuPfAR;OrLhvA#|`M~bjtQd1T4#w=h zJMcOlv)-|UBixTI;YOAFcE^vZ!n?=iY2=HF@I_VBPPte);gi~#)me$^(aOX+vqb%Q z>po-F{HxQziiTJfYcY!&qIqIZR8gyaKWZOe(FSa^g}Qnt?nl8eREOGC>Afgvx!NAdzNFYlj|5?mK17#6&_i=H#qqv{H+vak z%*Iw=jX5M6JCJ;AL)v2(5|M)k*{zlo1=_`rAi0u0Q&8 z^6-<79{uUB<>bDM)JP`#K9_O4n|8bnn!c&(PJvG%n=P=@r;lp)wlr=FtKO=|PaFo?^;Vj|>oJFkv zY71&5`eB+)X%@;{5j0XC@(DEd*axeEWfr4Xi<47s&p`wXs+X9R@;<4F-%4z5l^t$p4D)J zWhqFs0fJtv`LD3762~yjBd}i;gmQ#vCUme~eSeqpcOq zQ`~xrt4@ZDRpw@+opcx_(uRtzcZ;kI6lLvGfuGXBoX0*HV|FsVBpgJ#G7OLUX(%)s zJiAy|jiYI`#2;pKp|G3{-UPjU?l6wLy~6U1C8h+V-ddw;4E<3M=vojAQZd5CEztEK zcz+c3r!zJxrzG~v1_|1~IAE_*??n>dBJmOlLmkR>xlLkTWlWDQ8Injzh%-8-eExF9 zuljYr>96>!e#5W%HKReM4Z=7Hg8bWL`hL5;d-Ij;-5>64L+-q`zqj|wPCFdN?S6VU zPIRT)k5L?VbtU`oX!qvLy%yL0D2wDRa-=^DvkZ4TA&KXv4Cb%@?`=ig-ICEDRnb-! 
zC0U%@-J;uV8%JZCF6P=Hnfvej^wq7Mot@oTo*Q;hg=saVN$l^UuRVkxTNHjI7IfBU zSF=E+Y5#WAR|R3b^mZ@mD&qv%CTop5>8L^}lyRbT#at4COi~0o4lZ&(xX9Dm|8Wu0 d*C~1#aoT4-t8=_|^T=lo^Pkjslh><7@4tsv+C=~W literal 0 HcmV?d00001 diff --git a/util/__pycache__/remote_sensing_util.cpython-39.pyc b/util/__pycache__/remote_sensing_util.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9adc1e81c7a474e871059989c6ca2907c9724a6c GIT binary patch literal 2803 zcmc&$|8Eq>72j|7{my5f4gQ8}T1vzJF~62OjQVSTLjCq;ZZYR7s`6WRHJ|zH z+xOniym|9x&0H=m!FBm>@0;%pO448XL-(b^!<*2&uLB^dDj^vu4Y?|dUa2b3s}0rG zs+t|E#$*X;D0Wk-#&H70uPbC0C8|l3tfp`rCvj>+0S#qQDyG(7OP}%Sic568qJPtJ zu2}k_>nx`)TQ=6=s_3syy;?4n%cT=~`L)W))9D%P)hg+yFb-+HsIbhc-7u(YAge}M zh9iFhVNKyG(e(=S2cbsK~eVhvq;1N7zx~#HY-u8PqNDDB3c~IJr!@*z><%T8HGc1Ke5zhl3j(9Kdk%;#J zAB}hc_@RjR10Rd{0Pw>RKLGql#18`3BR&XxJmN#ZkD}pV6y`Y+90D8-#sohs_z}T% z!N&zZD)V&vsg$ z-|Th>IlExem5w`I-~Ht;+CSfH-Fg7Z@Tz0j`t-xr_kQ(-=9gD|`>JDFk&|xoCl5b) za_4^Q?&tr*3BlASPDKo8eehhR2kBuI9;;reI+lNxp?%U^bzbGT0DOlnadVb=Rx z0Cnkw#2pe=dPT8dwv1} zqCb<5tt-Xi`bi$+3zrs+bLH~s$y3Js@spFs4PG{+A5NlZoNTCqS<5J*tHiaVQC(?S(n%*6xew%$Kqm@X5uxnk88Yi<)lIBOA)p0ylw=G&Nn5gUMoxrS>LqiNSA z$Z|}IJ(jxU-spUR^-Q~s1}IprGws4E$K%a+wALb6BiNELik7A{LB{#cD8l%3?Rfq~a@LKGnDg2@H=C)A8yM zimF;89@7Xm5mRk*6~JNZ&nW{g*vvbsk2X;gfLrS{53H3?%D1tbzS zVe>Nud1Rcg#W05v4x=0naTo)r_FuyFC@>4W5Mh{3^?5iUS~IwAITnvwHFg!&sa(zO zMgO9?+WllX?=;hGIp}?V^!w^6<8xzG6dTXYxoonz#xl$G6Gqdvc){WW@xBWnB{g|KNy?fW@4mIPoCIl7 z8IgPCqcUfr*Rt{#-#xPOZ;dxa`Il&_@=uL7P5B3J$%OE)Dl)uZ-{(o&}oxA4Z69D*ylh literal 0 HcmV?d00001 diff --git a/util/__pycache__/scan_file_util.cpython-39.pyc b/util/__pycache__/scan_file_util.cpython-39.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25bb17fa8960bafeb65f246384f23e0644ee8608 GIT binary patch literal 15999 zcmeHOTW}OtdhVWUYcv<#g)YEw5gvK+@{=e`4`<#f+=aKMx`OWFb@7|E4f2WJepA8q^z>)ne1d~*W z$;_6JRau;Es!g2jsvT#0!jW{UPMPvJ60W3Mbte^7N!F+}BF>rcBx}{$q*wJOeX1|% 
zSN+L=8W3r&L|w98truZ;q9NI+Hj1#4Xi7G#%_3ZrXi2uJts?A6tV*`2Z6aKoXis*i z9kLYbn3B{^=4HNflDeAtSperXtd7;=yp}bvMx2AJi8bTAjoY%8etPST4teth> zyfN0rIwx%We`6b`B-C#UOWms{cY1c^^zk$g28V*r#8c14gW+^)%yT%Nj0N!<3J&xQ z^!N7n_YMXh8|v%x49Bw3AO_33+iupdWL8V0$Kok7KtVl{ozQqJ zn&!-`qlj!Qm5ryyG#1e#-A>ax64OWa9y%Bv#!7j>lLo5#v#rx?B9M#<>oM9Y^+`FpYF+O3KT+Y{*k` z-ezFp=GZA@8^#O*;$+v$sjtWm!7)HQhPw zG7!!?Uy@E>sxjv|#c&QwsMlq_F`Sg0j&kz%r^Ytu1)AE`v-=H^~Y&7bO2BV%e zT(;*MD{==|W1F;Wi6$e!nsE1KoSSiO!MO$JR=vsa8ZAcals&&nZ=PDg2d=Ub_m%C8DOg+8>ic^OKc57O{EZXBW*2e z3tsaYGFx}qKHYI0FOuQbJ5dA5PRSS_x8Y{%#aXdxM_4fw5vSmZZJ1t-CxN+w@uMfR zruZzri?oduX=@EH)~J(vv{X*RgFzM_e{K*>elP$W?uW{%@58OKKe=F;^mLu`(@$R=Wf1#iQ-><>E;K2U$}btHy{4+=KGfm zKRiEk>8}g#T)p+iyM>Qlx%vJ(4wA`kbJ{t+CqJ%eCCF%dP_APKsQC|+>YU)qZE7e`)NLLG;;=8F?A*vNhKpkp}vmh zpgWFcqmh(GnKZf#WzLwM$7!IV>15`KZrtO4a7dG#6sKYILXSh2L=xF25GZB)qz_qU z{^mM*UHHq_Z+`S=7@yha|7zyk&u3ow)y(C0t)akJ-TL6d%sYR6>#a+Lf4E{wlYJw5 zcWBU;+MfRY$3l;4qk}s`gBq#lzQ_6=)eiLU>cekG|09EgkA(F2i4&8(cTxh%&y+)x zJ;Ou2)WKdFh2F|h=p`kdjHDuCu_SbAFADTdIzm*=K_yHZKW;isX49$3p56$Lj>k{O zgtG7Dv1D40EdYRG)Ao}J#&dC{b9yGHn~qFGAIG(W5hysR+N|k2Jf2G)PetO1Lr69K zyVHq8EUL%TsoiN7GyTJHz>J>eXNF@Lq}PP?2-gX~=IXni?deMPbg^L9zM-yzLtSCh zy(h&eO|7*CN3_7SC)rN3Mm$(*K{1$(PsU8|?nFAr4gm$wX}%uM&I}w+=Ta=f&y2*< z$*|6EcujwFL8H{h?07m2$Qq-2Sq;jn^_7v-j;!jYdmdyvc|9st#XP4@TA?uRulk~N z|J~`+F^-W}8al_}>U@CO8^~hV z(JHzty-4Wxd<3j7Rh8{DTd}DE4^1}vgyq5cUzV(JsK2e9F>*i@0+iO0yh zsHi)Qp-HE*rkymz{5wo)oNq=BzJ&xSP1BZ(@@*8bbG+|0f;vX&-F8_MK`3b6Lit-s zJOe=rKYJBIa=L7atx0y^ui#%Ndu$DI6G9YY{XI6n?5D8JBe%$2o5$|LwZ~RxbIA?j zOeGbhw8$+;bI3Ntdu0#)%ACV(`>n@QCo48D4ywsdc~P57M&92c_op7a*Nz%&%BP}r zQEFJ}*NH%xGX6K_9Fy~)jSK`4BtRDQ(FPE=gMbl%bbvta09hmg=>&n?!2n`{KmyW4 zu1X5&1ch`JDdb+=HSMmTkZz_dppXtw$d^$wQAj5!#j9vVji8XY1M`4Fdh&{{oUF;$ zKzhnF^1t%6%x=`4miW86>tyY;mnaOY1u^lW7AwwIrkIjNT|`cQk=`Wq07zSn;Q?v$ zq3*i8Ul2zh^P`r4AdiANvIdaHIzb-&ppT%6AdmG6N`MqDTcW}63DVezGm*w7oLg~j z7UZ$XXl72f>O9CT+Cv1=B9S6bDTTEOGTB^_3d&fO+6qFZfK=LzX3)wOK_pw~c_2)5 zvPIBIpP-WUf|`Ls8V#io^PpWp)VfZP%Jmkh1g!<}6L~fett^sEQCFE{ZY+{aaknDL 
zq&Nl7w5x(h((@{j%&SP-RFPI9nJsKH+OUP5;Dj=ELjdqaC4%Sm7nQb(CyF)nwBSGpu{>5L+{Ol?jYdzn7s=sfbaDK{S zJ|cC9$MhUe4T+0SYEWp;P%qMNy?Z@JwpZc9vxQ$=nz{J)%#|ym`2K?V0N#N>KG48^@~NBej?Z@?H6#cyCbuq6o=TnMK`OO@1gYyPwK^->y_FJ3x$^BKdPsyw^pYSxBWMEer_c@( z#1!}=BpxO47>OYge?a1K5`ReI2@+o?@ePO@>-h+U%Ye;y(&Oby%qJ)=K_W>)%)BxJn*N0o z?)%761VezJ&1Lfx0TJ-E2oMRhJaVU% z84XwYw*U2B@kp^fuQl_gW!cP5d=vIE(L;vRUjw~Az_*ko>*YG z02z4-H7yBETy>v8i*B#FKYbWg_Z(Drtx*dO?U$?8*G}MDw`%$v}t-5*ff2)ui+K3)cKFzD_nVX_U+dS7hVj)Nn5!5_RMQPuTtuj zmW`#q*P(o+K{K+qpIp2HS#nBB<&8@O( z7hu_V#JAedY1VMEPnNN0>KDzlM=%TT(5Tr6qsGET@4Yl?40PxA$oSBD`0frFm-6rh z1RB;s53dvrtrB0MA$>;vJZMNBzAp(4KOu#<2n`1dSKh<2j^BKEVPtR5U{C}vt`Hu{ zbF&a02Q^fng~RjU2*p&2ydTi`b{QHs6crXNT3O!4QQgIM&bmn#3XA6l6k5vS*}f87 zEcN>L7Zt@wJ-!VT=iftE#D^~!NVQ8C zj9vhnZyAGeDOmKEjl{a5k%%=?tg*^SY^pF4trA!T#g;2UMk0OiA|sKEMf&hX#$x4% zueK$OL|P~1WFm#NFESDrr&b$@w1TQ+B!2bbi&j?^KYZ~#sd?e9qmM?Fc!nj8^FZbr z-K``MTwc`#}vUn2ofE$blQfu>8WElYSr zDULj%-4$NXN{{HjAZ=?!+Pp_}8``kF$|KrS_K1drN3_(sl1CJ6gGaQt$|KrW^oaH^ z@Q4mndqfLAd8=^#@A_c_uF&Ko=U^p7mRJ7I17&Tvf0wrZJKD(8!~5BWK?^x)ah>?yWh!Akoe_}w zFaas8{&}$7qW;RsaIdhmVezy(f;ssGnt&Uz@^L^OxclWE7x2VxZvuXWsO7bf4+f%k zu+C)=72p1<@R4H5<=C1{bL>*ijQ3viGQ3vJ`)gB&r@_r!cKi>vP2UdWjAmc^1={X85hF z-ugsGKN~>#d8++I5>q6^l94iw@|P&?VS-TN0}ly=(k!??5Gv-|-#(xa`ZAuFsc6`0 z6;Eke_a@V=X)GPpG;0N|Smrv4JZclR!6f2Qv6m*4jp?-AC@U7%s&&|qmQ1HYdODrR zhOiB!Vp*<}$~Fs1LA$I%a0MJJ?I{v29^rNuJ`BOXXa-^H6775w`D4#Tk{N8PA!@5O zi+ymkx3;(~PHejs3vyNed>Vzsid=P-h^*QXCX)SBpHC#y-VzZPK%BWYlF62~;)cqh zZ0LA#pDwSXl1`Cgt?8vjr<|5Aar#or$uG@cAVKtmmj;;jv0zli;;ebmiY%t`coI&i oMC=JTE~IP-!r>IVJFo+!N$$knkMiHf&a7$zx7mgTq&RxI0qVK9PXm=AU0?9v%&+F{vk$i)M?3I;UQq*J99Wl(gp z7&0)x+p*|hun#-zxKj`NC%g{D(7&+L9?EHvVI=r|kbGb2dHGOjyB!gOA^B%Vh^`&^bCh z>L2tE5Bf))et*#aGCGyzc+mMNpT2qh+nZls|NY0G(OcvpU20piRF`sSbf(VF#?;MT zHxD|*_y(h#Kp0XHLsv|HXV`_e<`vmc*nUM;^wpyz5IdZLR_r4pFo2R)-kJ$d@C`#X zfc3%?K77#srML7Auc!!D?9~^`pbAhCe;tTmgL+W{zp(*r9^W99+6Af9O;Xk5X`U_a z9X|wy&I_r_tWXd3`@O^cj#OimPMts4pPR|O`}+@1qGRkji589M_`q3~MwTqNdqE4o 
"""
Author : XinYi Song
Time : 2021/10/13 10:13
Desc: File-copy helpers and date-based storage-path builders.
"""
import os
import time
from shutil import copy


def copyToDir(from_path, to_path):
    """Copy the single file *from_path* into directory *to_path*, creating it if missing."""
    if not os.path.isdir(to_path):
        os.makedirs(to_path)
    copy(from_path, to_path)


def copyToDirAll(path, path_two):
    """
    Copy every regular file directly under *path* into *path_two*.

    :param path: source directory
    :param path_two: destination directory (must already exist, as before)
    :return: None
    """
    if not (os.path.isdir(path) and os.path.isdir(path_two)):
        print("不是文件夹 ")
        return
    for name in os.listdir(path):
        src = os.path.join(path, name)
        dst = os.path.join(path_two, name)
        # Skip sub-directories: the original open() on a directory raised an error.
        if not os.path.isfile(src):
            continue
        # Context managers close both handles even on error; the original
        # shadowed its listing variable `a` with the output file handle.
        with open(src, "rb") as fsrc, open(dst, "wb") as fdst:
            fdst.write(fsrc.read())
        print("{}复制成功".format(name))


def _ymd(data_str_time):
    """Parse 'YYYY-MM-DD HH:MM:SS' and return (year, month, day) as unpadded strings."""
    t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
    return str(t[0]), str(t[1]), str(t[2])


def file_store_path(time_stamp):
    """
    Build 'E:/data/upload/<year>/<month>/<day>' from a POSIX timestamp (local time).

    :param time_stamp: timestamp in seconds (float or int)
    :return: joined path string
    """
    # Round-trip through milliseconds kept for byte-compatible rounding behavior.
    now = int(round(time_stamp * 1000))
    t = time.localtime(now / 1000)
    return os.path.join('E:/data/upload', str(t[0]), str(t[1]), str(t[2]))


def file_store_path_year(data_str_time, upload_path):
    """
    Directory down to the year.

    :param data_str_time: 'YYYY-MM-DD HH:MM:SS' string
    :param upload_path: root upload directory
    :return: <upload_path>/<year>
    """
    year, _, _ = _ymd(data_str_time)
    return os.path.join(upload_path, year)


def file_store_path_month(data_str_time, upload_path):
    """
    Directory down to the month.

    :param data_str_time: 'YYYY-MM-DD HH:MM:SS' string
    :param upload_path: root upload directory
    :return: <upload_path>/<year>/<month>
    """
    year, month, _ = _ymd(data_str_time)
    return os.path.join(upload_path, year, month)


def file_store_path_day(data_str_time, upload_path):
    """
    Directory down to the day.

    :param data_str_time: 'YYYY-MM-DD HH:MM:SS' string
    :param upload_path: root upload directory
    :return: <upload_path>/<year>/<month>/<day>
    """
    year, month, day = _ymd(data_str_time)
    return os.path.join(upload_path, year, month, day)


if __name__ == '__main__':
    str_time = '2020-06-08 09:33:07'
    t = time.strptime(str_time, '%Y-%m-%d %H:%M:%S')
    print(t)
os.path.dirname(os.path.dirname(__file__)) +home = os.path.join(BASE_DIR, "E:/data/upload") + + +# 定义一个函数,计算进度条 +def bar(num=1, sum=100): + rate = float(num) / float(sum) + rate_num = int(rate * 100) + temp = '\r%d %%' % rate_num + sys.stdout.write(temp) + + +def md5_file(name): + m = md5() + a_file = open(name, 'rb') #需要使用二进制格式读取文件内容 + m.update(a_file.read()) + a_file.close() + return m.hexdigest() + + +def upload_client(local_path, depth, dateTime): + global file_path + while True: + file_byte_size = os.stat(local_path).st_size # 获取文件的大小 + file_name = os.path.basename(local_path) # 设置文件名 + md5 = md5_file(local_path) + + has_sent = 0 + file_obj = open(local_path, 'rb') # 对文件进行读操作 + file_obj.seek(has_sent) # 调整指针 + if depth == 'year': + file_path = file_store_path_year(dateTime, home) + if not os.path.exists(file_path): + os.makedirs(file_path) + if depth == 'month': + file_path = file_store_path_month(dateTime, home) + if not os.path.exists(file_path): + os.makedirs(file_path) + if depth == 'day': + file_path = file_store_path_day(dateTime, home) + if not os.path.exists(file_path): + os.makedirs(file_path) + path = os.path.join(file_path, file_name) + has_received = 0 + + # 首先判断该路径下是否已存在文件 + if os.path.exists(path): + f = open(path, 'wb') + else: + f = open(path, 'wb') + + while has_sent < file_byte_size: + # 读出数据 + data = file_obj.read(1024) + try: + # 写入数据 + f.write(data) + has_received += len(data) + if not data: + raise Exception + except Exception: + flag = False + break + has_sent += len(data) + bar(has_sent, file_byte_size) # 进度条 + print("文件上传成功!") + file_obj.close() + f.close() + file_dict = {'fileName': file_name, 'md5': md5, 'file_size': file_byte_size, 'file_path': file_path, 'type': 'ok'} + return file_dict diff --git a/util/http_util.py b/util/http_util.py new file mode 100644 index 0000000..38dc858 --- /dev/null +++ b/util/http_util.py @@ -0,0 +1,158 @@ +# 导入requests包 +import json +import os +from typing import Dict + +import requests +from 
class httpUtil(object):
    """
    Thin convenience wrapper around ``requests`` for the DMS HTTP API.

    All constructor arguments are keyword-only; unset ones default to None.
    Method names (including the historical 'patam'/'herder' spellings) are
    kept unchanged for existing callers.
    """

    def __init__(self, *, url: str, params: Dict = None, data: Dict = None, file_path: str = None, token: str = None):
        # url: target endpoint; params: query string; data: JSON body;
        # file_path: file to upload (post_file); token: Authorization header value.
        self.url = url
        self.params = params
        self.file_path = file_path
        self.data = data
        self.token = token

    def get_no_param(self):
        """GET without query parameters; returns the Response object."""
        return requests.get(url=self.url)

    def get(self):
        """GET with query parameters; returns the Response, or -1 on request failure."""
        try:
            return requests.get(url=self.url, params=self.params)
        except Exception:  # keep the legacy -1 sentinel, but not a bare except
            return -1

    def get_herder(self):
        """GET with query parameters and an Authorization header; -1 on failure."""
        try:
            headers = {"Authorization": self.token}
            return requests.get(url=self.url, params=self.params, headers=headers)
        except Exception:
            return -1

    def post_no_patam(self):
        """POST self.data as a JSON-encoded body (no query parameters)."""
        return requests.post(url=self.url, data=json.dumps(self.data))

    def post_no_patam_herder(self):
        """POST self.data as JSON with Content-Type and Authorization headers."""
        headers = {
            "Content-Type": "application/json",
            "Authorization": self.token
        }
        return requests.post(url=self.url, data=json.dumps(self.data), headers=headers)

    def post_patam_herder(self):
        """POST with query parameters and an Authorization header.

        Fix: the original passed ``param=`` (typo) to requests.post, which
        raises TypeError instead of sending the query string.
        """
        headers = {"Authorization": self.token}
        return requests.post(url=self.url, params=self.params, headers=headers)

    def post(self):
        """POST self.data as a JSON body together with query parameters."""
        return requests.post(url=self.url, data=json.dumps(self.data), params=self.params)

    def post_param(self):
        """POST with query parameters only."""
        return requests.post(url=self.url, params=self.params)

    def post_file(self):
        """Upload self.file_path as multipart form data; the handle is closed
        after the request (the original leaked it)."""
        filepath, filename = os.path.split(self.file_path)
        with open(self.file_path, 'rb') as fp:
            m = MultipartEncoder({'file': (filename, fp)})
            headers = {
                "Content-Type": m.content_type,
                "other-keys": "other-values"
            }
            res = requests.post(url=self.url, data=m, params=self.params, headers=headers)
        return res


if __name__ == '__main__':
    res = httpUtil(url='http://192.168.2.105:8820/api/login',
                   params={"userName": "client1", "password": "sxy1998"}).post_param()
    print(res.json()['data'])
    token_s = res.json()['data']
    res3 = httpUtil(url='http://192.168.2.105:8820/api/dic-remote-sensing-data/get/code',
                    params={"code": "GF4-0001"}, token=token_s).get_herder()
    print(res3.json())
import hashlib
import json
import os


def write_info(file_name, file_info):
    """
    Serialize *file_info* as pretty-printed JSON to ``<file_name>.json``.

    :param file_name: target path WITHOUT the .json suffix (it is appended)
    :param file_info: any json.dump-serializable object
    """
    parent = os.path.dirname(file_name)
    # Only create a directory when the target actually has one:
    # the original called os.makedirs('') for bare names, which raises.
    # (Also avoids shadowing the builtin `dir`.)
    if parent and not os.path.exists(parent):
        os.makedirs(parent)
    with open('{}.json'.format(file_name), 'w', encoding='UTF-8') as fp:
        json.dump(file_info, fp, indent=4, sort_keys=False)


def read_json(file_path):
    """Load and return the JSON document stored at *file_path* (UTF-8,
    matching what write_info produces)."""
    with open(file_path, 'r', encoding='UTF-8') as f:
        return json.load(f)


# Sample annotation payload kept for the commented-out manual experiments below.
report_data = {"project_no": "628740635893760", "img_path": "2000.png",
               "create_time": "2021-06-10T11:17:12.202000+00:00", "labels": [
        {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
         "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
         "point": [{"x": 114.04715127701375, "y": 53.04518664047151}, {"x": 196.2671905697446, "y": 53.04518664047151},
                   {"x": 196.2671905697446, "y": 149.4106090373281},
                   {"x": 114.04715127701375, "y": 149.4106090373281}]},
        {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
         "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
         "point": [{"x": 284.67583497053045, "y": 64.53831041257367}, {"x": 401.3752455795678, "y": 64.53831041257367},
                   {"x": 401.3752455795678, "y": 266.1100196463654},
                   {"x": 284.67583497053045, "y": 266.1100196463654}]},
        {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
         "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
         "point": [{"x": 501.2770137524558, "y": 148.52652259332024}, {"x": 623.2809430255403, "y": 148.52652259332024},
                   {"x": 623.2809430255403, "y": 320.0392927308448},
                   {"x": 501.2770137524558, "y": 320.0392927308448}]}]}


class Md5Util(object):
    """
    Salted MD5 helper: hexdigest of salt followed by password.

    NOTE(review): plain salted MD5 is weak for password storage — consider
    hashlib.pbkdf2_hmac if this guards real credentials.
    """

    def __init__(self, *, salt: str, password: str):
        self.salt = salt
        self.password = password

    def md5(self):
        """Return hex digest of md5(salt + password), both UTF-8 encoded."""
        obj = hashlib.md5(self.salt.encode('utf-8'))
        obj.update(self.password.encode('utf-8'))
        return obj.hexdigest()


if __name__ == "__main__":
    path = os.path.abspath(os.path.dirname(__file__))
    print(path)
def gf4_pmi_001(file_name, xml_name):
    """
    Parse one GF4_PMI_001 scene: raster properties via rasterio plus product
    metadata from the accompanying XML, returned as one flat dict for the
    archiving pipeline.

    :param file_name: raster file name under the dataset folder
    :param xml_name: product XML file name under the dataset folder
    :return: dict of metadata fields (times, cloud cover, WKT boundary, raster stats)
    """
    file_path = 'E:/sensing/GF4_PMI_001/'
    with rasterio.open(file_path + file_name, 'r') as ds:
        print('该栅格数据的基本数据集信息:')
        CollectionCode = 'GF4_PMI_001'          # dataset code
        DataFormat = ds.driver                  # raster driver / format
        NumberBands = ds.count                  # band count
        ImageWidth = ds.width
        ImageHeight = ds.height
        GeographicScope = ds.bounds             # geographic extent
        ReflectionParameter = ds.transform      # affine transform (six parameters)
        ProjectionDefinition = ds.crs           # projection definition

        # Band 1 statistics (bands are 1-indexed, as in GDAL).
        band1 = ds.read(1)
        FirstBindMax = band1.max()
        FirstBindMin = band1.min()
        FirstBindAverage = band1.mean()

        # Row/col of the point 300 m east and 300 m south of the top-left corner.
        x, y = (ds.bounds.left + 300, ds.bounds.top - 300)
        row, col = ds.index(x, y)
        print(f'(投影坐标{x}, {y})对应的行列号是({row}, {col})')
        ProjectedCoordinates = x, y
        RowNumber = row, col

        # Back from row/col to the cell-center projected coordinate.
        x, y = ds.xy(row, col)
        print(f'行列号({row}, {col})对应的中心投影坐标是({x}, {y})')
        CenterProjectionCoordinates = x, y

        # Product XML: acquisition times, cloud cover and the four corners.
        xml_dict = xml_to_dict(file_path + xml_name)
        meta = xml_dict['ProductMetaData']
        StartTime = meta['StartTime']
        EndTime = meta['EndTime']
        CloudPercent = meta['CloudPercent']
        corners = [
            (meta['TopLeftLatitude'], meta['TopLeftLongitude']),
            (meta['TopRightLatitude'], meta['TopRightLongitude']),
            (meta['BottomRightLatitude'], meta['BottomRightLongitude']),
            (meta['BottomLeftLatitude'], meta['BottomLeftLongitude']),
        ]
        # Closed ring: first corner repeated last.
        # NOTE(review): vertices are emitted "lat lon"; WKT conventionally uses
        # "lon lat" — confirm with downstream consumers before changing.
        ring = corners + corners[:1]
        boundaryGeomStr = 'POLYGON((' + ','.join(f'{la} {lo}' for la, lo in ring) + '))'

        thumb = "GF4_IRS_E119.8_N35.3_20210908_L1A0000417337_thumb.jpg"
        sensing_dict = {
            'StartTime': StartTime, 'EndTime': EndTime, 'CloudPercent': CloudPercent,
            'boundaryGeomStr': boundaryGeomStr, 'DataFormat': DataFormat,
            'NumberBands': NumberBands, 'ImageWidth': ImageWidth,
            'ImageHeight': ImageHeight, 'GeographicScope': GeographicScope,
            'FirstBindAverage': FirstBindAverage,
            'ProjectedCoordinates': ProjectedCoordinates, 'RowNumber': RowNumber,
            'CollectionCode': CollectionCode,
            "ThumbnailPath": file_path + thumb,
            "ThumbnailName": thumb,
            "xmlPath": "", "xmlFileName": "",
            'DirectoryDepth': 'day',
        }
    return sensing_dict


if __name__ == '__main__':
    file_path = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.tiff'
    xml_path = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
    gf4_pmi_001(file_path, xml_path)
sched = BlockingScheduler()


def list_dir(file_dir):
    """
    Scan *file_dir* for remote-sensing files not yet ingested, parse and
    archive each one, then record the archiving task (GF4_PMI_001 demo flow).

    os.listdir only returns direct children, so sub-directories are recursed
    into explicitly.

    :param file_dir: directory to scan
    :return: None (side effects: uploads, HTTP calls, task records)
    """
    # User login; skip this run if an archiving task is already in progress.
    token_s = dms_login()
    task = dms_task_record(token_s)
    if task is not None and len(task) > 0:
        return
    fileNameList = [dms['fileName'] for dms in dms_sensing_data(token_s)]
    dir_list = os.listdir(file_dir)
    # Only files the DMS does not already know about.
    d = [y for y in dir_list if y not in fileNameList]
    if not d:
        print('没有多余的遥感数据文件,终止程序')
        return
    file_total_size = ""
    file_total_name = ""
    for cur_file in d:
        path = os.path.join(file_dir, cur_file)
        if os.path.isdir(path):
            print("{0} : is dir!".format(cur_file))
            list_dir(path)  # recurse into sub-directories
            continue  # fix: `uc` was referenced unbound here in the original
        if not os.path.isfile(path):
            continue
        # Demo: parse a hard-coded sample scene.
        file_name = 'GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.tiff'
        xml_path = 'GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
        gf4_sensing = gf4_pmi_001(file_name, xml_path)
        file_name = os.path.basename(path).split('.')[0]
        xml_path = 'D:/file/work/pythonyuanma/dms_management/xml/' + file_name + '.xml'
        # Write parsed metadata to a local XML file.
        dict_to_xml(gf4_sensing, xml_path)

        # Upload the generated XML and the thumbnail to the file server;
        # handles are closed via context managers (the original leaked them).
        url = Config.DFS_UPLOAD_URL
        options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/GF4_PMI_001'}
        with open(xml_path, 'rb') as fp:
            xml = requests.post(url, data=options, files={'file': fp})
        with open(gf4_sensing['ThumbnailPath'], 'rb') as fp:
            ThumbnailName = requests.post(url, data=options, files={'file': fp})

        DirectoryDepth = gf4_sensing['DirectoryDepth']
        StartTime = gf4_sensing['StartTime']
        uc = upload_client(path, DirectoryDepth, StartTime)

        StartTime = time.mktime(time.strptime(gf4_sensing['StartTime'], '%Y-%m-%d %H:%M:%S'))
        EndTime = time.mktime(time.strptime(gf4_sensing['EndTime'], '%Y-%m-%d %H:%M:%S'))

        # Register the remote-sensing record.
        httpUtil(url=Config.RESING_DATA_URL,
                 data={"collectionCode": "GF4_PMI_001", "shootingTimeStartTs": StartTime,
                       "shootingTimeEndTs": EndTime,
                       "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
                       "fileSize": uc['file_size'], "cloudCoverage": gf4_sensing['CloudPercent'],
                       "metaInformationFile": xml.json()['path'],
                       "thumbnailFile": ThumbnailName.json()['path'],
                       "remarks": "", "boundaryGeomStr": gf4_sensing['boundaryGeomStr']},
                 token=token_s).post_no_patam_herder()

        file_total_size += str(uc['file_size']) + ","
        file_total_name += uc['fileName'] + ","
        print("{0} : is file!".format(cur_file))

    if not file_total_name:
        return  # nothing was uploaded, so no archiving task to record

    # Open and immediately close an archiving-task record for this batch.
    res = httpUtil(url=Config.DATA_TASK_URL,
                   data={"clientCode": "client1", "collectionCode": "GF4_PMI_001",
                         "storageFileList": file_total_name,
                         "storageFileSizeList": file_total_size, "remarks": ""},
                   token=token_s).post_no_patam_herder()
    task_code = res.json()['data']
    header = {"Authorization": token_s}
    requests.post(url=Config.DATA_END_TASK_URL,
                  params={"taskCode": task_code}, headers=header).json()
def scan_VJ102_dir():
    """
    Scan for new JPSS VJ102IMG granules ('.nc' files whose name contains
    'VJ102IMG'), parse, upload and register each, then record the batch as an
    archiving task.

    :return: None (side effects: uploads, HTTP calls, task records)
    """
    file_dir = 'E:/数管'
    print('开始扫描VJ102IMG数据集')
    collectionCode = 'VJ102IMG'
    # User login; skip this run if an archiving task is already in progress.
    token_s = dms_login()
    task = dms_task_record(token_s, collectionCode)
    if task is not None and len(task) > 0:
        return
    fileNameList = [dms['fileName'] for dms in dms_sensing_data(token_s, collectionCode)]
    dir_list = os.listdir(file_dir)
    # Only files the DMS does not already know about.
    d = [y for y in dir_list if y not in fileNameList]
    if not d:
        print('没有多余的遥感数据文件,终止程序')
        return
    file_total_size = ""
    file_total_name = ""
    xmlPath = Config.XML_PATH              # where generated XML is stored
    ThumbnailPath = Config.THUMBNAIL_PATH  # where thumbnails are stored
    for cur_file in d:
        if 'VJ102IMG' not in cur_file or os.path.splitext(cur_file)[1] != '.nc':
            continue
        path = os.path.join(file_dir, cur_file)
        print(path)
        if os.path.isdir(path):
            print("{0} : is dir!".format(cur_file))
            list_dir(path)  # recurse into sub-directories
            continue  # fix: `uc` was referenced unbound here in the original
        if not os.path.isfile(path):
            continue
        JPSSData_dict = GetJPSSData(path, xmlPath, ThumbnailPath)

        # Upload the generated XML and the thumbnail; handles closed via
        # context managers (the original leaked them).
        url = Config.DFS_UPLOAD_URL
        options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
        with open(JPSSData_dict['xmlPath'], 'rb') as fp:
            xml = requests.post(url, data=options, files={'file': fp})
        with open(JPSSData_dict['ThumbnailPath'], 'rb') as fp:
            ThumbnailName = requests.post(url, data=options, files={'file': fp})

        DirectoryDepth = JPSSData_dict['DirectoryDepth']
        StartTime = JPSSData_dict['StartTime']
        uc = upload_client(path, DirectoryDepth, StartTime[0:19])

        StartTime = time.mktime(time.strptime(JPSSData_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S'))
        EndTime = time.mktime(time.strptime(JPSSData_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S'))

        # Register the remote-sensing record.
        res_data = httpUtil(url=Config.RESING_DATA_URL,
                            data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
                                  "shootingTimeEndTs": EndTime,
                                  "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
                                  "fileSize": uc['file_size'], "cloudCoverage": JPSSData_dict['CloudPercent'],
                                  "metaInformationFile": xml.json()['path'],
                                  "thumbnailFile": ThumbnailName.json()['path'],
                                  "remarks": "", "boundaryGeomStr": JPSSData_dict['boundaryGeomStr']},
                            token=token_s).post_no_patam_herder()
        print(res_data.json()['data'])

        file_total_size += str(uc['file_size']) + ","
        file_total_name += uc['fileName'] + ","
        print("{0} : is file!".format(cur_file))

    if not file_total_name:
        return  # nothing was uploaded, so no archiving task to record

    # Open and immediately close an archiving-task record for this batch.
    res = httpUtil(url=Config.DATA_TASK_URL,
                   data={"clientCode": "client1", "collectionCode": collectionCode,
                         "storageFileList": file_total_name,
                         "storageFileSizeList": file_total_size, "remarks": ""},
                   token=token_s).post_no_patam_herder()
    task_code = res.json()['data']
    header = {"Authorization": token_s}
    requests.post(url=Config.DATA_END_TASK_URL,
                  params={"taskCode": task_code}, headers=header).json()
def scan_VJ103_dir():
    """
    Scan for new JPSS VJ103IMG granules ('.nc' files whose name contains
    'VJ103IMG'), parse, upload and register each, then record the batch as an
    archiving task.

    :return: None (side effects: uploads, HTTP calls, task records)
    """
    file_dir = 'E:/数管'
    print('开始扫描VJ103IMG数据集')
    collectionCode = 'VJ103IMG'
    # User login; skip this run if an archiving task is already in progress.
    token_s = dms_login()
    task = dms_task_record(token_s, collectionCode)
    if task is not None and len(task) > 0:
        return
    fileNameList = [dms['fileName'] for dms in dms_sensing_data(token_s, collectionCode)]
    dir_list = os.listdir(file_dir)
    # Only files the DMS does not already know about.
    d = [y for y in dir_list if y not in fileNameList]
    if not d:
        print('没有多余的遥感数据文件,终止程序')
        return
    file_total_size = ""
    file_total_name = ""
    xmlPath = Config.XML_PATH              # where generated XML is stored
    ThumbnailPath = Config.THUMBNAIL_PATH  # where thumbnails are stored
    for cur_file in d:
        if 'VJ103IMG' not in cur_file or os.path.splitext(cur_file)[1] != '.nc':
            continue
        path = os.path.join(file_dir, cur_file)
        if os.path.isdir(path):
            print("{0} : is dir!".format(cur_file))
            list_dir(path)  # recurse into sub-directories
            continue  # fix: `uc` was referenced unbound here in the original
        if not os.path.isfile(path):
            continue
        JPSSData_dict = GetJPSSData(path, xmlPath, ThumbnailPath)

        # Upload the generated XML and the thumbnail; handles closed via
        # context managers (the original leaked them).
        url = Config.DFS_UPLOAD_URL
        options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode}
        with open(JPSSData_dict['xmlPath'], 'rb') as fp:
            xml = requests.post(url, data=options, files={'file': fp})
        with open(JPSSData_dict['ThumbnailPath'], 'rb') as fp:
            ThumbnailName = requests.post(url, data=options, files={'file': fp})

        DirectoryDepth = JPSSData_dict['DirectoryDepth']
        StartTime = JPSSData_dict['StartTime']
        uc = upload_client(path, DirectoryDepth, StartTime[0:19])

        StartTime = time.mktime(time.strptime(JPSSData_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S'))
        EndTime = time.mktime(time.strptime(JPSSData_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S'))

        # Register the remote-sensing record.
        res_data = httpUtil(url=Config.RESING_DATA_URL,
                            data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime,
                                  "shootingTimeEndTs": EndTime,
                                  "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
                                  "fileSize": uc['file_size'], "cloudCoverage": JPSSData_dict['CloudPercent'],
                                  "metaInformationFile": xml.json()['path'],
                                  "thumbnailFile": ThumbnailName.json()['path'],
                                  "remarks": "", "boundaryGeomStr": JPSSData_dict['boundaryGeomStr']},
                            token=token_s).post_no_patam_herder()
        print(res_data.json()['data'])

        file_total_size += str(uc['file_size']) + ","
        file_total_name += uc['fileName'] + ","
        print("{0} : is file!".format(cur_file))

    if not file_total_name:
        return  # nothing was uploaded, so no archiving task to record

    # Open and immediately close an archiving-task record for this batch.
    res = httpUtil(url=Config.DATA_TASK_URL,
                   data={"clientCode": "client1", "collectionCode": collectionCode,
                         "storageFileList": file_total_name,
                         "storageFileSizeList": file_total_size, "remarks": ""},
                   token=token_s).post_no_patam_herder()
    task_code = res.json()['data']
    header = {"Authorization": token_s}
    requests.post(url=Config.DATA_END_TASK_URL,
                  params={"taskCode": task_code}, headers=header).json()
'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = GFPMS_dict['CollectionCode'] + DirectoryDepth = GFPMS_dict['DirectoryDepth'] + StartTime = GFPMS_dict['StartTime'] + uc = upload_client(path, DirectoryDepth, StartTime[0:19]) + + StartTime = time.mktime(time.strptime(GFPMS_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(GFPMS_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": GFPMS_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": GFPMS_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def 
scan_GF3MDJ_dir(): + """ + 获取高分3号MDJ(GF-3 MDJ)卫星元数据 + :return: + """ + file_dir = 'E:/数管' + print('开始扫描GF3_MDJ_SS数据集') + collectionCode = 'GF3_MDJ_SS' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'GF3_MDJ' in cur_file[0:7] and os.path.splitext(cur_file)[1] == '.gz': + + # 获取文件的绝对路径 + path = os.path.join(file_dir, cur_file) + if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + GF3_MDJ_SS_dict = GetGF3MDJData(path, xmlPath, ThumbnailPath) + + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(GF3_MDJ_SS_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(GF3_MDJ_SS_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = GF3_MDJ_SS_dict['CollectionCode'] + DirectoryDepth = GF3_MDJ_SS_dict['DirectoryDepth'] + StartTime = GF3_MDJ_SS_dict['StartTime'] + uc = upload_client(path, DirectoryDepth, StartTime[0:19]) + + StartTime = time.mktime(time.strptime(GF3_MDJ_SS_dict['StartTime'][0:19], '%Y-%m-%d %H:%M:%S')) + EndTime = 
time.mktime(time.strptime(GF3_MDJ_SS_dict['EndTime'][0:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": GF3_MDJ_SS_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": GF3_MDJ_SS_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def scan_H08_dir(): + """ + 获取高分3号MDJ(GF-3 MDJ)卫星元数据 + :return: + """ + file_dir = 'E:/数管' + print('开始扫描H08数据集') + collectionCode = 'NC_H08' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 
判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'NC_H08' in cur_file[0:6] and os.path.splitext(cur_file)[1] == '.nc': + + # 获取文件的绝对路径 + path = os.path.join(file_dir, cur_file) + if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + GetH08_dict = GetH08Data(path, xmlPath, ThumbnailPath) + + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetH08_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetH08_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = GetH08_dict['CollectionCode'] + DirectoryDepth = GetH08_dict['DirectoryDepth'] + StartTime = GetH08_dict['ProduceTime'][0:10] + ' ' + GetH08_dict['ProduceTime'][11:19] + uc = upload_client(path, DirectoryDepth, StartTime) + + StartTime = time.mktime(time.strptime(GetH08_dict['ProduceTime'][0:10] + ' ' + GetH08_dict['ProduceTime'][11:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(GetH08_dict['ProduceTime'][0:10] + ' '+ GetH08_dict['ProduceTime'][11:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": GetH08_dict['CloudPercent'], + "metaInformationFile": 
xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": GetH08_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def scan_Sentinel1_dir(): + """ + 获取哨兵1号卫星元数据 + :return: + """ + file_dir = 'E:/数管' + print('开始扫描Sentinel1数据集') + collectionCode = 'S1A_IW_GRDH' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'S1A_IW_GRDH' in cur_file[0:11] and os.path.splitext(cur_file)[1] == '.zip': + + # 获取文件的绝对路径 + path = 
os.path.join(file_dir, cur_file) + if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + Sentinel1_dict = GetSentinel1Data(path, xmlPath, ThumbnailPath) + + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(Sentinel1_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(Sentinel1_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = Sentinel1_dict['CollectionCode'] + DirectoryDepth = Sentinel1_dict['DirectoryDepth'] + StartTime = Sentinel1_dict['ProduceTime'][0:10] + ' ' + Sentinel1_dict['ProduceTime'][11:19] + uc = upload_client(path, DirectoryDepth, StartTime) + + StartTime = time.mktime(time.strptime(Sentinel1_dict['StartTime'][0:10] + ' ' + Sentinel1_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(Sentinel1_dict['StopTime'][0:10] + ' ' + Sentinel1_dict['StopTime'][11:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": Sentinel1_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": Sentinel1_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = 
file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def scan_Sentinel2_dir(): + """ + 获取哨兵2号卫星元数据 + :return: + """ + file_dir = 'E:/数管' + print('开始扫描Sentinel2数据集') + collectionCode = 'S2B' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'S2B' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.zip': + + # 获取文件的绝对路径 + path = os.path.join(file_dir, cur_file) + if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + Sentinel2_dict = GetSentinel2Data(path, xmlPath, ThumbnailPath) + + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(Sentinel2_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, 
files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(Sentinel2_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = Sentinel2_dict['CollectionCode'] + DirectoryDepth = Sentinel2_dict['DirectoryDepth'] + StartTime = Sentinel2_dict['ProduceTime'][0:10] + ' ' + Sentinel2_dict['ProduceTime'][11:19] + uc = upload_client(path, DirectoryDepth, StartTime) + + StartTime = time.mktime(time.strptime(Sentinel2_dict['StartTime'][0:10] + ' ' + Sentinel2_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(Sentinel2_dict['StopTime'][0:10] + ' ' + Sentinel2_dict['StopTime'][11:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": Sentinel2_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": Sentinel2_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, 
"remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def scan_Sentinel3OL_dir(): + """ + 获取哨兵3号卫星元数据 + :return: + """ + file_dir = 'E:/数管' + print('开始扫描Sentinel3数据集') + collectionCode = 'Sentinel3_OLCI' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'S3B' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.zip': + + # 获取文件的绝对路径 + path = os.path.join(file_dir, cur_file) + print(path) + if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + Sentinel3OL_dict = GetSentinel3OLData(path, xmlPath, ThumbnailPath) + print(Sentinel3OL_dict) + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(Sentinel3OL_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(Sentinel3OL_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = Sentinel3OL_dict['CollectionCode'] + DirectoryDepth = 
Sentinel3OL_dict['DirectoryDepth'] + StartTime = Sentinel3OL_dict['StartTime'][0:10] + ' ' + Sentinel3OL_dict['StartTime'][11:19] + uc = upload_client(path, DirectoryDepth, StartTime) + + StartTime = time.mktime(time.strptime(Sentinel3OL_dict['StartTime'][0:10] + ' ' + Sentinel3OL_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(Sentinel3OL_dict['StopTime'][0:10] + ' ' + Sentinel3OL_dict['StopTime'][11:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": Sentinel3OL_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": Sentinel3OL_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def scan_HJ1_dir(): + """ + 获取环境1号卫星元数据 + :return: + """ + file_dir = 'E:/数管' + 
print('开始扫描环境1号数据集') + collectionCode = 'HJ-1' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'HJ1' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.gz': + + # 获取文件的绝对路径 + path = os.path.join(file_dir, cur_file) + if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + GetHJ1Data_dict = GetHJ1Data(path, xmlPath, ThumbnailPath) + + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetHJ1Data_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetHJ1Data_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = GetHJ1Data_dict['CollectionCode'] + DirectoryDepth = GetHJ1Data_dict['DirectoryDepth'] + StartTime = GetHJ1Data_dict['ProductTime'][0:10] + ' ' + GetHJ1Data_dict['ProductTime'][11:19] + uc = upload_client(path, DirectoryDepth, StartTime) + + StartTime = time.mktime(time.strptime(GetHJ1Data_dict['StartTime'][0:10] + ' ' + GetHJ1Data_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(GetHJ1Data_dict['EndTime'][0:10] + ' ' + 
GetHJ1Data_dict['EndTime'][11:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": GetHJ1Data_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": GetHJ1Data_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def scan_ZY3_dir(): + """ + 获取资源3号卫星元数据 + :return: + """ + file_dir = 'E:/数管' + print('开始扫描资源3号数据集') + collectionCode = 'ZY-3' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y 
in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'ZY3' in cur_file[0:3] and os.path.splitext(cur_file)[1] == '.gz': + + # 获取文件的绝对路径 + path = os.path.join(file_dir, cur_file) + if os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + GetZY3Data_dict = GetZY3Data(path, xmlPath, ThumbnailPath) + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetZY3Data_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetZY3Data_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = GetZY3Data_dict['CollectionCode'] + DirectoryDepth = GetZY3Data_dict['DirectoryDepth'] + StartTime = GetZY3Data_dict['ProduceTime'][0:10] + ' ' + GetZY3Data_dict['ProduceTime'][11:19] + uc = upload_client(path, DirectoryDepth, StartTime) + + StartTime = time.mktime(time.strptime(GetZY3Data_dict['StartTime'][0:10] + ' ' + GetZY3Data_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(GetZY3Data_dict['EndTime'][0:10] + ' ' + GetZY3Data_dict['EndTime'][11:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": GetZY3Data_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + 
"thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": GetZY3Data_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + "," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +def scan_SNPP_dir(): + """ + 获取资源3号卫星元数据 + :return: + """ + file_dir = 'E:/数管' + print('开始扫描VNP02IMG数据集') + collectionCode = 'VNP02IMG' + # 用户登录 + token_s = dms_login() + # 判断定时任务是否在进行 + task = dms_task_record(token_s, collectionCode) + # 如果不是空说明正在进行 + if task is not None and len(task) > 0: + return + fileNameList = [] + dms_list = dms_sensing_data(token_s, collectionCode) + for dms in dms_list: + fileNameList.append(dms['fileName']) + dir_list = os.listdir(file_dir) + # 判断扫描出的文件和已有的文件,将多出的文件进行解析 + d = [y for y in dir_list if y not in fileNameList] + if d is None or len(d) == 0: + print('没有多余的遥感数据文件,终止程序') + return + file_total_size = "" + file_total_name = "" + xmlPath = Config.XML_PATH # 解析出的xml文件保存的路径 + ThumbnailPath = Config.THUMBNAIL_PATH # 解析出的缩略图保存的路径 + for cur_file in d: + if 'VNP02IMG' in cur_file[0:8] and os.path.splitext(cur_file)[1] == '.nc': + + # 获取文件的绝对路径 + path = os.path.join(file_dir, cur_file) + if 
os.path.isfile(path): # 判断是否是文件还是目录需要用绝对路径 + # 解析遥感数据文件(demo) + GetSNPPData_dict = GetSNPPData(path, xmlPath, ThumbnailPath) + # 配置文件服务器参数 + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetSNPPData_dict['xmlPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + xml = requests.post(url, data=options, files=files) + + url = Config.DFS_UPLOAD_URL + files = {'file': open(GetSNPPData_dict['ThumbnailPath'], 'rb')} + options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collectionCode} + # 上传生成的xml文件到文件服务器 + ThumbnailName = requests.post(url, data=options, files=files) + + CollectionCode = GetSNPPData_dict['CollectionCode'] + DirectoryDepth = GetSNPPData_dict['DirectoryDepth'] + StartTime = GetSNPPData_dict['ProductionTime'][0:10] + ' ' + GetSNPPData_dict['ProductionTime'][11:19] + uc = upload_client(path, DirectoryDepth, StartTime) + + StartTime = time.mktime(time.strptime(GetSNPPData_dict['StartTime'][0:10] + ' ' + GetSNPPData_dict['StartTime'][11:19], '%Y-%m-%d %H:%M:%S')) + EndTime = time.mktime(time.strptime(GetSNPPData_dict['EndTime'][0:10] + ' ' + GetSNPPData_dict['EndTime'][11:19], '%Y-%m-%d %H:%M:%S')) + + # 入库遥感数据 + res_data = httpUtil(url=Config.RESING_DATA_URL, + data={"collectionCode": collectionCode, "shootingTimeStartTs": StartTime, + "shootingTimeEndTs": EndTime, + "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'], + "fileSize": uc['file_size'], "cloudCoverage": GetSNPPData_dict['CloudPercent'], + "metaInformationFile": xml.json()['path'], + "thumbnailFile": ThumbnailName.json()['path'], + "remarks": "", "boundaryGeomStr": GetSNPPData_dict['boundaryGeomStr']}, + token=token_s).post_no_patam_herder() + print(res_data.json()['data']) + + file_total_size = file_total_size + str(uc['file_size']) + file_total_size = file_total_size + "," + + file_total_name = file_total_name + uc['fileName'] + file_total_name = file_total_name + 
"," + # print("========"+suffix) + print("{0} : is file!".format(cur_file)) + if os.path.isdir(path): + print("{0} : is dir!".format(cur_file)) + list_dir(path) # 递归子目录 + if uc['type'] == 'ok': + continue + # 添加遥感数据归档任务 + res = httpUtil(url=Config.DATA_TASK_URL, + data={"clientCode": "client1", "collectionCode": collectionCode, "storageFileList": file_total_name, + "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder() + task_code = res.json()['data'] + + # 结束遥感数据归档任务 + header = {"Authorization": token_s} + res = requests.post(url=Config.DATA_END_TASK_URL, + + params={"taskCode": task_code}, headers=header).json() + + +if __name__ == '__main__': + # file_dir = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_001/sensingdata' + # list_dir(file_dir) + # file_dir = 'E:/数管' + # + # scan_VJ102_dir() + # + # scan_VJ103_dir() + + # scan_GF1_PMS2_dir() + # scan_GF3MDJ_dir() + # scan_H08_dir() + # scan_Sentinel1_dir() + # scan_Sentinel2_dir() + # scan_Sentinel3OL_dir() + # scan_HJ1_dir() + # scan_ZY3_dir() + scan_SNPP_dir() \ No newline at end of file diff --git a/util/snow_ari.py b/util/snow_ari.py new file mode 100644 index 0000000..bfa380e --- /dev/null +++ b/util/snow_ari.py @@ -0,0 +1,103 @@ +import sys +import time +import logging + + +class MySnow(object): + + def __init__(self, datacenter_id, worker_id): + # 初始毫秒级时间戳(2021-06-09) + self.initial_time_stamp = int(time.mktime(time.strptime('2021-06-09 00:00:00', "%Y-%m-%d %H:%M:%S")) * 1000) + # 机器 ID 所占的位数 + self.worker_id_bits = 5 + # 数据表示 ID 所占的位数 + self.datacenter_id_bits = 5 + # 支持的最大机器 ID,结果是 31(这个位移算法可以很快的计算出几位二进制数所能表示的最大十进制数) + # 2**5-1 0b11111 + self.max_worker_id = -1 ^ (-1 << self.worker_id_bits) + # 支持最大标识 ID,结果是 31 + self.max_datacenter_id = -1 ^ (-1 << self.datacenter_id_bits) + # 序列号 ID所占的位数 + self.sequence_bits = 12 + # 机器 ID 偏移量(12) + self.workerid_offset = self.sequence_bits + # 数据中心 ID 偏移量(12 + 5) + self.datacenterid_offset = self.sequence_bits + self.datacenter_id_bits 
class MySnow(object):
    """Snowflake-style 64-bit unique ID generator.

    Bit layout (high -> low): millisecond timestamp delta since the custom
    epoch | 5-bit datacenter id | 5-bit worker id | 12-bit per-millisecond
    sequence.
    """

    def __init__(self, datacenter_id, worker_id):
        # Custom epoch (2021-06-09 00:00:00) as a millisecond timestamp.
        self.initial_time_stamp = int(time.mktime(time.strptime('2021-06-09 00:00:00', "%Y-%m-%d %H:%M:%S")) * 1000)
        # Bit widths of the worker-id and datacenter-id fields.
        self.worker_id_bits = 5
        self.datacenter_id_bits = 5
        # Largest value each field can hold: 2**5 - 1 == 31.
        self.max_worker_id = -1 ^ (-1 << self.worker_id_bits)
        self.max_datacenter_id = -1 ^ (-1 << self.datacenter_id_bits)
        # Bit width of the per-millisecond sequence field.
        self.sequence_bits = 12
        # Field offsets inside the final ID: worker at 12, datacenter at 17,
        # timestamp at 22.
        self.workerid_offset = self.sequence_bits
        self.datacenterid_offset = self.sequence_bits + self.datacenter_id_bits
        self.timestamp_offset = self.sequence_bits + self.datacenter_id_bits + self.worker_id_bits
        # Mask for the 12-bit sequence (0b111111111111 == 0xfff == 4095).
        self.sequence_mask = -1 ^ (-1 << self.sequence_bits)

        self.logger = logging.getLogger('snowflake')

        # Validate datacenter id (0 ~ 31).  The original message reported
        # max_worker_id here by mistake; it now reports the datacenter limit.
        if datacenter_id > self.max_datacenter_id or datacenter_id < 0:
            err_msg = 'datacenter_id 不能大于 %d 或小于 0' % self.max_datacenter_id
            self.logger.error(err_msg)
            sys.exit()
        self.datacenter_id = datacenter_id
        # Validate worker id (0 ~ 31).
        if worker_id > self.max_worker_id or worker_id < 0:
            err_msg = 'worker_id 不能大于 %d 或小于 0' % self.max_worker_id
            self.logger.error(err_msg)
            sys.exit()
        self.worker_id = worker_id
        # Sequence within the current millisecond (0 ~ 4095).
        self.sequence = 0
        # Timestamp of the last generated ID; -1 means "none yet".
        self.last_timestamp = -1

    def _gen_timestamp(self):
        """Return the current time as an integer millisecond timestamp."""
        return int(time.time() * 1000)

    def next_id(self):
        """Generate and return the next snowflake ID.

        NOTE(review): if the clock moved backwards this only logs an error and
        keeps going instead of refusing to issue IDs — preserved as-is; confirm
        whether callers rely on that.
        """
        timestamp = self._gen_timestamp()
        if timestamp < self.last_timestamp:
            self.logger.error('clock is moving backwards. Rejecting requests until {}'.format(self.last_timestamp))
        if timestamp == self.last_timestamp:
            # Same millisecond as the previous ID: advance the sequence.
            self.sequence = (self.sequence + 1) & self.sequence_mask
            if self.sequence == 0:
                # Sequence overflowed within this millisecond; busy-wait for
                # the next one.
                timestamp = self._til_next_millis(self.last_timestamp)
        else:
            # New millisecond: reset the sequence.
            self.sequence = 0

        self.last_timestamp = timestamp

        # Assemble the 64-bit ID from its shifted fields.
        new_id = ((timestamp - self.initial_time_stamp) << self.timestamp_offset) | \
                 (self.datacenter_id << self.datacenterid_offset) | \
                 (self.worker_id << self.workerid_offset) | \
                 self.sequence
        return new_id

    def _til_next_millis(self, last_timestamp):
        """Busy-wait until the clock passes *last_timestamp*; return the new
        millisecond timestamp."""
        timestamp = self._gen_timestamp()
        while timestamp <= last_timestamp:
            timestamp = self._gen_timestamp()
        return timestamp


if __name__ == '__main__':
    mysnow = MySnow(1, 2)
    id = mysnow.next_id()
    print(id)


def init_data(file_Str_Dict):
    """Normalize the input for the xml helpers.

    If *file_Str_Dict* is a path to an existing file, return the file's
    contents; if it is a plain string or a dict, return it unchanged.
    (Anything else falls through and returns None, as in the original.)
    """
    if isinstance(file_Str_Dict, str) and os.path.isfile(file_Str_Dict):
        # The original leaked the file handle; `with` closes it.
        with open(file_Str_Dict) as fp:
            return fp.read()
    elif isinstance(file_Str_Dict, (str, dict)):
        return file_Str_Dict


def xml_to_dict(file_Str_Dict):
    """Parse XML (given as a file path or an XML string) into a plain dict.

    Round-trips through json to turn xmltodict's OrderedDict into ordinary
    dicts, preserving the original implementation's behavior.
    """
    data = init_data(file_Str_Dict)

    data_orderedD = xmltodict.parse(data)
    data_json = json.dumps(data_orderedD, indent=4)
    data_dict = json.loads(data_json)

    return data_dict
def dict_to_xml(dict_in, xml_out):
    """Serialize *dict_in* to pretty-printed UTF-8 XML and write it to *xml_out*.

    :param dict_in: dictionary with a single root key (dict2xml output must
        parse as one XML document)
    :param xml_out: path of the XML file to write
    """
    xml_str = dict2xml.dict2xml(dict_in)
    # The original prepended an XML prolog by string concatenation (the
    # literal was garbled in this diff), but toprettyxml(encoding="utf-8")
    # already emits its own declaration, so the prolog is not needed.
    # Newlines are stripped so toprettyxml can re-indent from scratch
    # without doubling blank lines.
    dom = parseString(xml_str.replace('\n', ''))
    # toprettyxml returns bytes when an encoding is given; decode for a
    # text-mode write. `with` closes the file — the explicit close() the
    # original called inside the with-block was redundant.
    pretty = dom.toprettyxml(indent=" ", newl="\n", encoding="utf-8")
    with open(xml_out, 'w') as f:
        f.write(pretty.decode("utf-8"))


if __name__ == '__main__':
    d = xml_to_dict("E:/xmltest/demo.xml")
    print(d)
"""
Author : XinYi Song
Time : 2021/10/9 9:43
Desc: Run a snippet of Python source in a subprocess and capture its output.
"""
import os
import subprocess
import sys
import tempfile
import time

# Scratch directory for the generated script files.
TempFile = tempfile.mkdtemp(suffix='_test', prefix='python_')
# Millisecond timestamp used to build a unique script name.
FileNum = int(time.time() * 1000)
# Interpreter used to execute the submitted code.
EXEC = sys.executable


def get_version():
    """Return the running interpreter version as 'python <major>.<minor>'."""
    v = sys.version_info
    return "python %s.%s" % (v.major, v.minor)


def get_pyname():
    """Return the generated script's base name (no extension).

    The needless `global FileNum` of the original is dropped — the name is
    only read here, never assigned.
    """
    return 'test_%d' % FileNum


def write_file(pyname, code):
    """Write *code* to '<TempFile>/<pyname>.py' and return the full path."""
    fpath = os.path.join(TempFile, '%s.py' % pyname)
    with open(fpath, 'w', encoding='utf-8') as f:
        f.write(code)
    print('file path: %s' % fpath)
    return fpath


def decode(s):
    """Decode bytes *s*, trying UTF-8 first and falling back to GBK.

    (The original comment labeled this "encode"; it decodes.)
    """
    try:
        return s.decode('utf-8')
    except UnicodeDecodeError:
        return s.decode('gbk')


def main(code):
    """Execute *code* in a subprocess and return its result.

    SECURITY: this runs arbitrary Python with the caller's privileges —
    only ever feed it trusted input.

    :param code: Python source text to execute
    :return: dict with 'version' and 'output'; on a non-zero exit the dict
        also carries 'code': 'Error' and 'output' holds the child's stderr/stdout
    """
    r = dict()
    r["version"] = get_version()
    pyname = get_pyname()
    fpath = write_file(pyname, code)
    try:
        # check_output waits for the child and returns its stdout;
        # stderr is merged into stdout so error text is captured too.
        outdata = decode(subprocess.check_output([EXEC, fpath], stderr=subprocess.STDOUT, timeout=5))
    except subprocess.CalledProcessError as e:
        # Non-zero exit: e.output carries the combined output.
        r["code"] = 'Error'
        r["output"] = decode(e.output)
        return r
    else:
        r['output'] = outdata
        return r
    finally:
        # Best-effort cleanup. BUGFIX: the original called exit(1) here when
        # os.remove failed — raising SystemExit inside `finally` discards the
        # function's return value and kills the whole process over a stale
        # temp file. A failed unlink is simply ignored.
        try:
            os.remove(fpath)
        except OSError:
            pass


# if __name__ == '__main__':
#     code = "print(11);print(22)"
#     print(main(code))