Optimize Flask

This commit is contained in:
parent e1d50c3b72
commit 752cc7e19d
Binary file not shown.
api/hello.py (new file, +7)
@@ -0,0 +1,7 @@
+api = RedPrint('hello')
+
+
+@api.route('/hello', methods=['GET'])
+def index():
+    return '<H2>Hello , World</H2>'
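The new view module assumes a RedPrint helper that this commit does not include. As a hedged sketch only, the usual shape of such a helper (deferred route registration that is later replayed onto a real Flask Blueprint) is shown below; the class body is an assumption, not the project's actual implementation.

    # Hypothetical RedPrint helper (assumption, not part of this commit).
    from flask import Blueprint


    class RedPrint:
        def __init__(self, name):
            self.name = name
            self.mound = []  # deferred (view_func, rule, options) registrations

        def route(self, rule, **options):
            def decorator(f):
                self.mound.append((f, rule, options))
                return f
            return decorator

        def register(self, bp: Blueprint, url_prefix=None):
            if url_prefix is None:
                url_prefix = '/' + self.name
            # Replay the deferred routes onto a real Blueprint.
            for f, rule, options in self.mound:
                endpoint = options.pop('endpoint', f.__name__)
                bp.add_url_rule(url_prefix + rule, endpoint, f, **options)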
app.py (deleted, -16)
@@ -1,16 +0,0 @@
-from flask import Flask
-
-from application import init_app
-from common.config.factory import create_app
-
-app = init_app('dev')
-app = create_app()
-
-
-@app.route('/')
-def hello_world():
-    return 'Hello World!'
-
-
-if __name__ == '__main__':
-    app.run()
@@ -1,15 +1,10 @@
 # Project initialization file
-import logging
-from logging.handlers import RotatingFileHandler
 from flask import Flask
-from redis import StrictRedis
-from flask_wtf.csrf import CSRFProtect
 from flask_session import Session
-from flask_sqlalchemy import SQLAlchemy
+from flask_wtf.csrf import CSRFProtect
 
 from application.settings.dev import DevelopmentConfig
 from application.settings.prop import ProductionConfig
-from common.config.factory import create_app
 
 config = {
     'dev': DevelopmentConfig,
@@ -3,14 +3,22 @@ Author : XinYi Song
 Time : 2021/11/23 9:47
 Desc:
 """
-from flask import Flask
+from flask import Flask, Blueprint
 from flask_apscheduler import APScheduler
+from flask_cors import CORS
 
 scheduler = APScheduler()
 
 
+def init_blueprint_v1():
+    bp_v1 = Blueprint('/', __name__)
+    # user.api.register(bp_v1)
+    return bp_v1
+
+
 def create_app():
     app = Flask(__name__)
+    app.register_blueprint(init_blueprint_v1(), url_prefix='/')
     # Configure the scheduled jobs, otherwise the tasks will not start
     app.config.update(
         {
@@ -80,6 +88,7 @@ def create_app():
         ]
         }
     )
+    CORS(app)
     scheduler.init_app(app)
     scheduler.start()
    return app
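The commented-out "# user.api.register(bp_v1)" line hints at how module-level RedPrints are meant to be mounted on the version blueprint. A hedged sketch of wiring the new api/hello.py endpoint in (assuming the register() interface sketched earlier) could look like this; it is an illustration, not part of the commit.

    # Sketch only; assumes api/hello.py exposes a RedPrint with a register() method.
    from flask import Blueprint

    from api import hello


    def init_blueprint_v1():
        bp_v1 = Blueprint('/', __name__)
        hello.api.register(bp_v1)  # mounts the GET /hello view onto the blueprint
        return bp_v1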
@@ -1,16 +1,16 @@
+import io
 from xml.dom import minidom
 from osgeo import gdal
-from osgeo import ogr
-from osgeo import gdalconst
 import h5py
 from PIL import Image
 import numpy as np
-import tarfile
-import zipfile
 import re
 import os
-import io
 import sys
+import time
+import datetime
+import tarfile
+import zipfile
 
 
 def exe_path():
@@ -27,6 +27,14 @@ def exe_path():
 
 os.environ['PROJ_LIB'] = exe_path() + "/PROJ"
 
 
+def GetTimestamp(date):
+    # Convert to a time struct
+    timeArray = time.strptime(date, "%Y-%m-%d %H:%M:%S")
+    # Convert to a Unix timestamp
+    timestamp = time.mktime(timeArray)
+
+    return timestamp
+
+
 def uint16to8(bands, lower_percent=0.001, higher_percent=99.999):
     """
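Note on the new GetTimestamp helper: time.mktime interprets the parsed struct as local time, so the epoch value it returns depends on the machine's timezone. A quick sanity check:

    # The printed value depends on the local timezone of the machine.
    print(GetTimestamp("2021-11-23 09:47:00"))  # e.g. 1637632020.0 on a UTC+8 machine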
@@ -903,7 +911,7 @@ def GetSentinel2Data(in_file, xml_path, thumbnail_path):
     try:
         in_path, basename = os.path.split(in_file)
         with zipfile.ZipFile(in_file, 'r', zipfile.ZIP_DEFLATED) as zip_file:
-            extensions = ('_B02_60m.jp2', '_B03_60m.jp2', '_B04_60m.jp2', '.SAFE/MTD_MSIL2A.xml')
+            extensions = ('_B02.jp2', '_B03.jp2', '_B04.jp2', '.SAFE/MTD_MSIL1C')
             file_list = [file for file in zip_file.namelist() if file.endswith(extensions)]
             file_list.sort()
 
@@ -957,7 +965,7 @@ def GetSentinel2Data(in_file, xml_path, thumbnail_path):
             del img
 
             # Extract the multispectral XML metadata file
-            if file_list[3].endswith('.SAFE/MTD_MSIL2A.xml'):
+            if file_list[3].endswith('.SAFE/MTD_MSIL1C'):
                 # Generate the XML file
                 xmlFileName = os.path.splitext(basename)[0] + ".xml"
                 xmlPath = os.path.join(xml_path, xmlFileName)
@@ -1489,66 +1497,529 @@ def GetZY3Data(in_file, xml_path, thumbnail_path):
         return {"code": -1, "msg": str(e)}
 
 
+def GetLandsatData(in_file, thumbnail_path, txt_path):
+    try:
+        in_path, basename = os.path.split(in_file)
+        with tarfile.open(in_file, mode='r') as tar_file:
+            extensions = ('_B2.TIF', '_B3.TIF', '_B4.TIF', '_MTL.txt')
+            file_list = [file for file in tar_file.getnames() if file.endswith(extensions)]
+            file_list.sort()
+
+            # Generate the thumbnail
+            ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+            ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+            rgb_list = []
+            for file in file_list[:3]:
+                path = '/vsitar/%s/%s' % (in_file, file)
+                sub_dataset = gdal.Open(path)
+                sub_array = sub_dataset.ReadAsArray()
+                rgb_list.append(sub_array)
+            img_data = np.array([rgb_list[2], rgb_list[1], rgb_list[0]])
+            img_data = uint16to8(img_data)
+
+            # Convert the array to an image
+            img_data2 = np.transpose(img_data, (1, 2, 0))
+            img_data2 = img_data2[:, :, ::-1]
+            img = Image.fromarray(img_data2)
+            # Shrink the image
+            if img_data.shape[1] > img_data.shape[2]:
+                width = 512
+                height = int(width / img_data.shape[1] * img_data.shape[2])
+            else:
+                height = 512
+                width = int(height / img_data.shape[1] * img_data.shape[2])
+            img.thumbnail((width, height))
+            img.save(ThumbnailPath, "PNG")
+
+            # Free memory
+            del rgb_list
+            del img_data
+            del img_data2
+            del img
+
+            Landsat_dict = {}
+
+            if file_list[3].endswith('_MTL.txt'):
+                # Read the metadata text file
+                TxTPath = os.path.join(txt_path, file_list[3])
+                tar_file.extract(file_list[3], txt_path)
+
+                fr = open(TxTPath, 'r')
+                dic = {}
+                keys = []  # keeps the order in which keys were read
+                for line in fr:
+                    if '=' in line:
+                        v = line.strip().split(' = ')
+                        dic[v[0]] = v[1]
+                        keys.append(v[0])
+                fr.close()
+
+                # Read fields
+                cloud_percent = dic['CLOUD_COVER']
+                ImageGSD = '30'
+                ProjectedCoordinates = dic['MAP_PROJECTION']
+
+                # Product date
+                FILE_DATE = dic['FILE_DATE']
+                FILE_DATE = FILE_DATE.split("Z")[0].replace("T", " ")
+                ProduceTime = str(GetTimestamp(FILE_DATE))
+
+                # Corner coordinates
+                TopLeftLatitude = dic['CORNER_UL_LAT_PRODUCT']  # upper-left latitude
+                TopLeftLongitude = dic['CORNER_UL_LON_PRODUCT']  # upper-left longitude
+                TopRightLatitude = dic['CORNER_UR_LAT_PRODUCT']  # upper-right latitude
+                TopRightLongitude = dic['CORNER_UR_LON_PRODUCT']  # upper-right longitude
+                BottomRightLatitude = dic['CORNER_LR_LAT_PRODUCT']  # lower-right latitude
+                BottomRightLongitude = dic['CORNER_LR_LON_PRODUCT']  # lower-right longitude
+                BottomLeftLatitude = dic['CORNER_LL_LAT_PRODUCT']  # lower-left latitude
+                BottomLeftLongitude = dic['CORNER_LL_LON_PRODUCT']  # lower-left longitude
+
+                boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+                                  f'{TopRightLongitude} {TopRightLatitude},' \
+                                  f'{BottomRightLongitude} {BottomRightLatitude},' \
+                                  f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+                                  f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+                # Build the dictionary
+                Landsat_dict = {"ProduceTime": ProduceTime,
+                                "StartTime": "",
+                                "StopTime": "",
+                                "CloudPercent": cloud_percent,
+                                "boundaryGeomStr": boundaryGeomStr,
+                                # "bands": "1,2,3,4,5,6,7,8,9,10,11,12",
+                                "ImageGSD": ImageGSD,
+                                # "ProjectedCoordinates": ProjectedCoordinates,
+                                # "CollectionCode": '',
+                                "ThumbnailName": ThumbnailName,
+                                "ThumbnailPath": ThumbnailPath,
+                                "xmlPath": TxTPath,
+                                "xmlFileName": file_list[3],
+                                "DirectoryDepth": "month"}
+
+            if not Landsat_dict:
+                return {"code": -1, "msg": "没有满足条件的数据字典..."}
+            return Landsat_dict
+
+    except Exception as e:
+        print(str(e))
+        return {"code": -1, "msg": str(e)}
+
+
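Both of the new readers open individual bands straight out of the archive through GDAL's virtual file systems: /vsitar/ for the Landsat tarball above and /vsizip/ for the Sentinel-2 zip below. A minimal standalone check is sketched here; the archive paths and member names are placeholders, not files from this repository.

    from osgeo import gdal

    # Placeholders: substitute a real archive and an existing member name.
    tar_member = '/vsitar/LC08_example.tar.gz/LC08_example_B4.TIF'
    zip_member = '/vsizip/S2A_example.zip/GRANULE/L1C_example/IMG_DATA/T50SLG_example_B04.jp2'

    for path in (tar_member, zip_member):
        ds = gdal.Open(path)
        print(path, 'opened' if ds is not None else 'not found')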
+def GetSentinel2Data(in_file, xml_path, thumbnail_path):
+    """
+    Get Sentinel-2 satellite metadata
+    :param thumbnail_path:
+    :param in_file:
+    :return: metadata dictionary
+    """
+    try:
+        in_path, basename = os.path.split(in_file)
+        with zipfile.ZipFile(in_file, 'r', zipfile.ZIP_DEFLATED) as zip_file:
+            extensions = ('_B02.jp2', '_B03.jp2', '_B04.jp2', 'MTD_MSIL1C.xml')
+            file_list = [file for file in zip_file.namelist() if file.endswith(extensions)]
+            file_list.sort()
+
+            # Generate the thumbnail
+            ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
+            ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
+
+            rgb_list = []
+            for file in file_list[:3]:
+                sub_dataset = gdal.Open('/vsizip/%s/%s' % (in_file, file))
+                sub_array = sub_dataset.ReadAsArray()
+                rgb_list.append(sub_array)
+            img_data = np.array([rgb_list[2], rgb_list[1], rgb_list[0]])
+            img_data = uint16to8(img_data)
+
+            # Convert the array to an image
+            img_data2 = np.transpose(img_data, (1, 2, 0))
+            img_data2 = img_data2[:, :, ::-1]
+            img = Image.fromarray(img_data2)
+            # Shrink the image
+            if img_data.shape[1] > img_data.shape[2]:
+                width = 512
+                height = int(width / img_data.shape[1] * img_data.shape[2])
+            else:
+                height = 512
+                width = int(height / img_data.shape[1] * img_data.shape[2])
+            img.thumbnail((width, height))
+            img.save(ThumbnailPath, "PNG")
+
+            # Free memory
+            del rgb_list
+            del img_data
+            del img_data2
+            del img
+
+            S2_dict = {}
+
+            if file_list[3].endswith('MTD_MSIL1C.xml'):
+                # Generate the XML file
+                xmlFileName = os.path.splitext(basename)[0] + ".xml"
+                xmlPath = os.path.join(xml_path, xmlFileName)
+                meta_data = zip_file.read(file_list[3])
+                with open(xmlPath, "wb") as fout:
+                    fout.write(meta_data)
+
+                # Read the metadata
+                meta_content = zip_file.open(file_list[3])
+                dom = minidom.parse(meta_content)
+                cloud_percent = dom.getElementsByTagName('n1:Quality_Indicators_Info')[
+                    0].getElementsByTagName('Cloud_Coverage_Assessment')[0].firstChild.data
+                ImageGSD = '10, 20, 60'
+
+                # Product date
+                GENERATION_TIME = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+                    0].getElementsByTagName('GENERATION_TIME')[0].firstChild.data
+                GENERATION_TIME = GENERATION_TIME.split(".")[0].replace("T", " ")
+                ProduceTime = str(GetTimestamp(GENERATION_TIME))
+
+                PRODUCT_START_TIME = \
+                    dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+                        0].getElementsByTagName('PRODUCT_START_TIME')[0].firstChild.data
+                PRODUCT_START_TIME = PRODUCT_START_TIME.split(".")[0].replace("T", " ")
+                StartTime = str(GetTimestamp(PRODUCT_START_TIME))
+
+                PRODUCT_STOP_TIME = dom.getElementsByTagName('n1:General_Info')[0].getElementsByTagName('Product_Info')[
+                    0].getElementsByTagName('PRODUCT_STOP_TIME')[0].firstChild.data
+                PRODUCT_STOP_TIME = PRODUCT_STOP_TIME.split(".")[0].replace("T", " ")
+                StopTime = str(GetTimestamp(PRODUCT_STOP_TIME))
+
+                # Corner coordinates
+                lon_lat = dom.getElementsByTagName('n1:Geometric_Info')[0].getElementsByTagName('Product_Footprint')[
+                    0].getElementsByTagName('Product_Footprint')[0].getElementsByTagName('Global_Footprint')[
+                    0].getElementsByTagName('EXT_POS_LIST')[0].firstChild.data
+                TopLeftLatitude = re.split(r'\s', lon_lat)[0]  # upper-left latitude
+                TopLeftLongitude = re.split(r'\s', lon_lat)[1]  # upper-left longitude
+                TopRightLatitude = re.split(r'\s', lon_lat)[2]  # upper-right latitude
+                TopRightLongitude = re.split(r'\s', lon_lat)[3]  # upper-right longitude
+                BottomRightLatitude = re.split(r'\s', lon_lat)[4]  # lower-right latitude
+                BottomRightLongitude = re.split(r'\s', lon_lat)[5]  # lower-right longitude
+                BottomLeftLatitude = re.split(r'\s', lon_lat)[6]  # lower-left latitude
+                BottomLeftLongitude = re.split(r'\s', lon_lat)[7]  # lower-left longitude
+
+                boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+                                  f'{TopRightLongitude} {TopRightLatitude},' \
+                                  f'{BottomRightLongitude} {BottomRightLatitude},' \
+                                  f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+                                  f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+                # Build the dictionary
+                S2_dict = {"ProduceTime": ProduceTime,
+                           "StartTime": StartTime,
+                           "StopTime": StopTime,
+                           "CloudPercent": cloud_percent,
+                           "boundaryGeomStr": boundaryGeomStr,
+                           "ImageGSD": ImageGSD,
+                           "ThumbnailName": ThumbnailName,
+                           "ThumbnailPath": ThumbnailPath,
+                           "xmlPath": file_list[3],
+                           "xmlFileName": xmlFileName,
+                           "DirectoryDepth": "month"}
+
+            if not S2_dict:
+                return {"code": -1, "msg": "没有满足条件的数据字典..."}
+            return S2_dict
+
+    except Exception as e:
+        print(str(e))
+        return {"code": -1, "msg": str(e)}
+
+
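The metadata lookups above address elements by their prefixed tag names (for example n1:General_Info), which works with minidom because getElementsByTagName matches the literal qualified name. A small self-contained illustration with synthetic XML, not real Sentinel-2 metadata:

    from xml.dom import minidom

    doc = minidom.parseString(
        '<n1:Level-1C_User_Product xmlns:n1="https://example/ns">'
        '<n1:General_Info><Product_Info><GENERATION_TIME>'
        '2021-08-04T05:33:31.024Z'
        '</GENERATION_TIME></Product_Info></n1:General_Info>'
        '</n1:Level-1C_User_Product>'
    )
    gen_time = doc.getElementsByTagName('n1:General_Info')[0] \
        .getElementsByTagName('Product_Info')[0] \
        .getElementsByTagName('GENERATION_TIME')[0].firstChild.data
    print(gen_time.split(".")[0].replace("T", " "))  # 2021-08-04 05:33:31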
+def GetModisData(in_file):
+    """
+    Get MODIS satellite metadata
+    :param in_file:
+    :return: metadata dictionary
+    """
+    try:
+        datasets = gdal.Open(in_file)
+        # Read the metadata from the HDF file
+        Metadata = datasets.GetMetadata()
+
+        # Read fields
+        PRODUCTIONDATETIME = Metadata["PRODUCTIONDATETIME"]
+        PRODUCTIONDATETIME = PRODUCTIONDATETIME.split(".")[0].replace("T", " ")
+        ProductionTime = str(GetTimestamp(PRODUCTIONDATETIME))
+
+        RANGEBEGINNINGDATE = Metadata["RANGEBEGINNINGDATE"] + " " + Metadata["RANGEBEGINNINGTIME"]
+        StartTime = str(GetTimestamp(RANGEBEGINNINGDATE))
+
+        RANGEENDINGDATE = Metadata["RANGEENDINGDATE"] + " " + Metadata["RANGEENDINGTIME"]
+        EndTime = str(GetTimestamp(RANGEENDINGDATE))
+
+        Latitudes = Metadata["GRINGPOINTLATITUDE.1"]  # latitudes of the four corners
+        LatitudesList = Latitudes.split(", ")  # split on ", "
+        Longitude = Metadata["GRINGPOINTLONGITUDE.1"]  # longitudes of the four corners
+        LongitudeList = Longitude.split(", ")  # split on ", "
+        TopLeftLatitude = LatitudesList[0]
+        TopLeftLongitude = LongitudeList[0]
+        TopRightLatitude = LatitudesList[1]
+        TopRightLongitude = LongitudeList[1]
+        BottomRightLatitude = LatitudesList[2]
+        BottomRightLongitude = LongitudeList[2]
+        BottomLeftLatitude = LatitudesList[3]
+        BottomLeftLongitude = LongitudeList[3]  # corner coordinates
+
+        boundaryGeomStr = f'POLYGON(({TopLeftLongitude} {TopLeftLatitude},' \
+                          f'{TopRightLongitude} {TopRightLatitude},' \
+                          f'{BottomRightLongitude} {BottomRightLatitude},' \
+                          f'{BottomLeftLongitude} {BottomLeftLatitude},' \
+                          f'{TopLeftLongitude} {TopLeftLatitude}))'
+
+        DirectoryDepth = Metadata["DAYNIGHTFLAG"]
+
+        modis_dict = {"ProduceTime": ProductionTime,
+                      "StartTime": StartTime,
+                      "EndTime": EndTime,
+                      "ImageGSD": "",
+                      "CloudPercent": "0",
+                      'boundaryGeomStr': boundaryGeomStr,
+                      "ThumbnailPath": "",
+                      "ThumbnailName": "",
+                      "xmlPath": "",
+                      "xmlFileName": "",
+                      "DirectoryDepth": DirectoryDepth
+                      }
+
+        if not modis_dict:
+            return {"code": -1, "msg": "没有满足条件的数据字典..."}
+        return modis_dict
+    except Exception as e:
+        print(str(e))
+        return {"code": -1, "msg": str(e)}
+
+
+def GetGOCIData(in_file):
+    """
+    Get GOCI satellite metadata
+    :param in_file:
+    :return: metadata dictionary
+    """
+    try:
+        # Read the metadata
+        gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
+        in_datasets = gdal.Open(in_file)
+        meta_data = in_datasets.GetMetadata()
+
+        # Product date
+        date_created = meta_data['HDFEOS_POINTS_Ephemeris_Scene_center_time']
+        date_created = date_created.split(".")[0]
+        date_created = str(datetime.datetime.strptime(date_created, "%d-%b-%Y %H:%M:%S"))
+        ProductionTime = str(GetTimestamp(date_created))
+
+        start_time = meta_data['HDFEOS_POINTS_Ephemeris_Scene_Start_time']
+        start_time = start_time.split(".")[0]
+        start_time = str(datetime.datetime.strptime(start_time, "%d-%b-%Y %H:%M:%S"))
+        StartTime = str(GetTimestamp(start_time))
+
+        end_time = meta_data['HDFEOS_POINTS_Ephemeris_Scene_end_time']
+        end_time = end_time.split(".")[0]
+        end_time = str(datetime.datetime.strptime(end_time, "%d-%b-%Y %H:%M:%S"))
+        EndTime = str(GetTimestamp(end_time))
+
+        ImageGSD = meta_data['HDFEOS_POINTS_Scene_Header_pixel_spacing'].split(" ")[0]
+
+        # Corner latitude/longitude
+        upper_left_latitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_upper-left_latitude']
+        upper_left_longitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_upper-left_longitude']
+        upper_right_latitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_upper-right_latitude']
+        upper_right_longitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_upper-right_longitude']
+        lower_right_latitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_lower-right_latitude']
+        lower_right_longitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_lower-right_longitude']
+        lower_left_latitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_lower-left_latitude']
+        lower_left_longitude = meta_data['HDFEOS_POINTS_Scene_Header_Scene_lower-left_longitude']
+
+        boundaryGeomStr = f'POLYGON(({upper_left_longitude} {upper_left_latitude},' \
+                          f'{upper_right_longitude} {upper_right_latitude},' \
+                          f'{lower_right_longitude} {lower_right_latitude},' \
+                          f'{lower_left_longitude} {lower_left_latitude},' \
+                          f'{upper_left_longitude} {upper_left_latitude}))'
+
+        # Build the dictionary
+        GOCI_dict = {"ProduceTime": ProductionTime,
+                     "StartTime": StartTime,
+                     "EndTime": EndTime,
+                     "CloudPercent": "0",
+                     "boundaryGeomStr": boundaryGeomStr,
+                     "ImageGSD": ImageGSD,
+                     "ThumbnailPath": "",
+                     "ThumbnailName": "",
+                     "xmlPath": "",
+                     "xmlFileName": "",
+                     "DirectoryDepth": "day"}
+
+        # Check whether the dictionary is empty
+        if not GOCI_dict:
+            return {"code": -1, "msg": "没有满足条件的数据字典..."}
+        return GOCI_dict
+    except Exception as e:
+        print(str(e))
+        return {"code": -1, "msg": str(e)}
+
+
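The GOCI timestamps are parsed with the "%d-%b-%Y %H:%M:%S" format; %b matches abbreviated month names of the current locale, so strings such as 01-JAN-2020 parse only under an English-style locale. A small illustration:

    import datetime

    # Works under an English-type locale; may raise ValueError under others.
    print(datetime.datetime.strptime("01-JAN-2020 00:16:42", "%d-%b-%Y %H:%M:%S"))
    # 2020-01-01 00:16:42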
+def GetGK2BData(in_file):
+    """
+    Get GK2B GOCI satellite metadata
+    :param in_file:
+    :return: metadata dictionary
+    """
+    try:
+        # Read the metadata
+        gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
+        in_datasets = gdal.Open(in_file)
+        meta_data = in_datasets.GetMetadata()
+
+        # Product date
+        produc_time = meta_data['NC_GLOBAL#slot_acquisition_time'].replace("_", "")
+        produc_time = produc_time[0:4] + "-" + produc_time[4:6] + "-" + produc_time[6:8] + " " + \
+                      produc_time[8:10] + ":" + produc_time[10:12] + ":" + produc_time[12:14]
+        ProductionTime = str(GetTimestamp(produc_time))
+
+        start_time = meta_data['NC_GLOBAL#observation_start_time'].replace("_", "")
+        start_time = start_time[0:4] + "-" + start_time[4:6] + "-" + start_time[6:8] + " " + \
+                     start_time[8:10] + ":" + start_time[10:12] + ":" + start_time[12:14]
+        StartTime = str(GetTimestamp(start_time))
+
+        end_time = meta_data['NC_GLOBAL#observation_end_time'].replace("_", "")
+        end_time = end_time[0:4] + "-" + end_time[4:6] + "-" + end_time[6:8] + " " + \
+                   end_time[8:10] + ":" + end_time[10:12] + ":" + end_time[12:14]
+        EndTime = str(GetTimestamp(end_time))
+
+        # Other fields
+        ImageGSD = meta_data['NC_GLOBAL#geospatial_lat_resolution'].split(" ")[0]
+
+        # Corner latitude/longitude
+        productUpperLeftLat = meta_data['NC_GLOBAL#image_upperleft_latitude']  # upper-left latitude
+        productUpperLeftLong = meta_data['NC_GLOBAL#image_upperleft_longitude']  # upper-left longitude
+        productUpperRightLat = meta_data['NC_GLOBAL#image_upperleft_latitude']  # upper-right latitude
+        productUpperRightLong = meta_data['NC_GLOBAL#image_lowerright_longitude']  # upper-right longitude
+        productLowerLeftLat = meta_data['NC_GLOBAL#image_lowerright_latitude']  # lower-left latitude
+        productLowerLeftLong = meta_data['NC_GLOBAL#image_upperleft_longitude']  # lower-left longitude
+        productLowerRightLat = meta_data['NC_GLOBAL#image_lowerright_latitude']  # lower-right latitude
+        productLowerRightLong = meta_data['NC_GLOBAL#image_lowerright_longitude']  # lower-right longitude
+
+        # Boundary geometry
+        boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \
+                          f'{productUpperRightLong} {productUpperRightLat},' \
+                          f'{productLowerRightLong} {productLowerRightLat},' \
+                          f'{productLowerLeftLong} {productLowerLeftLat},' \
+                          f'{productUpperLeftLong} {productUpperLeftLat}))'
+
+        # Build the dictionary
+        GK2B_dict = {"ProduceTime": ProductionTime,
+                     "StartTime": StartTime,
+                     "EndTime": EndTime,
+                     "CloudPercent": "0",
+                     "boundaryGeomStr": boundaryGeomStr,
+                     "ImageGSD": ImageGSD,
+                     "ThumbnailPath": "",
+                     "ThumbnailName": "",
+                     "xmlPath": "",
+                     "xmlFileName": "",
+                     "DirectoryDepth": "day"}
+
+        # Check whether the dictionary is empty
+        if not GK2B_dict:
+            return {"code": -1, "msg": "没有满足条件的数据字典..."}
+        return GK2B_dict
+    except Exception as e:
+        print(str(e))
+        return {"code": -1, "msg": str(e)}
+
+
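Every reader builds its footprint as a plain WKT POLYGON string. If validation is wanted, the string can be round-tripped through ogr (note that the old "from osgeo import ogr" import was dropped above, so it would have to be re-imported); a hedged check with made-up coordinates:

    from osgeo import ogr

    # Made-up coordinates; a closed ring repeats the first point at the end.
    wkt = 'POLYGON((116.0 36.0,117.0 36.0,117.0 35.0,116.0 35.0,116.0 36.0))'
    geom = ogr.CreateGeometryFromWkt(wkt)
    print(geom is not None and geom.IsValid())  # True for a well-formed ring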
 if __name__ == '__main__':
-    HJ1FilePath = r"Y:\不同传感器数据\HJ-1\HJ1A-CCD2-450-80-20090501-L20000106616.tar.gz"
+    # HJ1FilePath = r"Y:\不同传感器数据\HJ-1\HJ1A-CCD2-450-80-20090501-L20000106616.tar.gz"
-    JPSSFilePath = r"Y:\不同传感器数据\JPSS\VJ102IMG.A2021159.0542.002.2021159094907.nc"
+    # JPSSFilePath = r"Y:\不同传感器数据\JPSS\VJ102IMG.A2021159.0542.002.2021159094907.nc"
-    ZY2FilePath = r"Y:\不同传感器数据\ZY-2\ZY02C_PMS_E115.9_N36.2_20120422_L2C0000391981.tar.gz"
+    # ZY2FilePath = r"Y:\不同传感器数据\ZY-2\ZY02C_PMS_E115.9_N36.2_20120422_L2C0000391981.tar.gz"
-    ZY3FilePath = r"Y:\不同传感器数据\ZY-3\ZY3_MUX_E83.3_N43.3_20120405_L2A0000301226.tar.gz"
+    # ZY3FilePath = r"Y:\不同传感器数据\ZY-3\ZY3_MUX_E83.3_N43.3_20120405_L2A0000301226.tar.gz"
+    #
-    S1FilePath = r'Y:\不同传感器数据\SENTINEL-1\S1A_IW_GRDH_1SDV_20210407T095634_20210407T095659_037343_046675_8E66.zip'
+    # S1FilePath = r'Y:\不同传感器数据\SENTINEL-1\S1A_IW_GRDH_1SDV_20210407T095634_20210407T095659_037343_046675_8E66.zip'
-    S2FilePath = r'Y:\不同传感器数据\SENTINEL-2\S2B_MSIL2A_20210804T024549_N0301_R132_T50SQF_20210804T053331.zip'
+    # S2FilePath = r"D:\1Company\Python\RS_data_Dowload\Google_Download_New\data\S2B_MSIL1C_20210113T024049_N0209_R089_T51STB_20210113T041228.zip"
-    GF1PMSPath = r'Y:\不同传感器数据\GF-1\GF1_PMS2_E104.1_N36.6_20210308_L1A0005524847.tar.gz'
+    # GF1PMSPath = r'Y:\不同传感器数据\GF-1\GF1_PMS2_E104.1_N36.6_20210308_L1A0005524847.tar.gz'
-    H08FilePath = r"Y:\不同传感器数据\葵花8\NC_H08_20210802_2010_R21_FLDK.06001_06001.nc"
+    # H08FilePath = r"Y:\不同传感器数据\葵花8\NC_H08_20210802_2010_R21_FLDK.06001_06001.nc"
-    SNPPFilePath = r"Y:\不同传感器数据\VIIRS\VNP02IMG.A2021182.0418.001.2021182100800.nc"
+    # SNPPFilePath = r"Y:\不同传感器数据\VIIRS\VNP02IMG.A2021182.0418.001.2021182100800.nc"
+    #
-    GF3MDJPath = r'Y:\不同传感器数据\GF-3\GF3_MDJ_SS_024986_E120.8_N35.6_20210509_L1A_VHVV_L10005638033.tar.gz'
+    # GF3MDJPath = r'Y:\不同传感器数据\GF-3\GF3_MDJ_SS_024986_E120.8_N35.6_20210509_L1A_VHVV_L10005638033.tar.gz'
-    GF4PMIPath = r'Y:\不同传感器数据\GF-4\GF4_PMI_E119.8_N35.3_20210908_L1A0000417337.tar.gz'
+    # GF4PMIPath = r'Y:\不同传感器数据\GF-4\GF4_PMI_E119.8_N35.3_20210908_L1A0000417337.tar.gz'
-    S3OLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \
+    # S3OLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \
-                   r'\S3B_OL_1_EFR____20210910T022645_20210910T022945_20210911T064342_0179_056_374_2340_LN1_O_NT_002.zip'
+    #                r'\S3B_OL_1_EFR____20210910T022645_20210910T022945_20210911T064342_0179_056_374_2340_LN1_O_NT_002.zip'
-    S3SLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \
+    # S3SLFilePath = r'Y:\不同传感器数据\SENTINEL-3' \
-                   r'\S3A_SL_1_RBT____20210916T020956_20210916T021256_20210917T120953_0179_076_217_2340_LN2_O_NT_004.zip'
+    #                r'\S3A_SL_1_RBT____20210916T020956_20210916T021256_20210917T120953_0179_076_217_2340_LN2_O_NT_004.zip'
     # Read HJ-1 metadata
-    hj1_dic = GetHJ1Data(HJ1FilePath)
+    # hj1_dic = GetHJ1Data(HJ1FilePath)
-    print(hj1_dic)
+    # print(hj1_dic)
     # Read JPSS metadata
-    jpss_dic = GetJPSSData(JPSSFilePath)
+    # jpss_dic = GetJPSSData(JPSSFilePath)
-    print(jpss_dic)
+    # print(jpss_dic)
     # Read ZY2 metadata
-    zy2_mux_dic, zy2_pan_dic = GetZY02CData(ZY2FilePath)
+    # zy2_mux_dic, zy2_pan_dic = GetZY02CData(ZY2FilePath)
-    print(zy2_mux_dic)
+    # print(zy2_mux_dic)
-    print(zy2_pan_dic)
+    # print(zy2_pan_dic)
     # Read ZY3 metadata
-    zy3_dic = GetZY3Data(ZY3FilePath)
+    # zy3_dic = GetZY3Data(ZY3FilePath)
-    print(zy3_dic)
+    # print(zy3_dic)
 
     # Read GF-PMS metadata
-    pms_mss_dic, pms_pan_dic = GetGFPMSData(GF1PMSPath)
+    # pms_mss_dic, pms_pan_dic = GetGFPMSData(GF1PMSPath)
-    print(pms_mss_dic)
+    # print(pms_mss_dic)
-    print(pms_pan_dic)
+    # print(pms_pan_dic)
     # Read Himawari-8 metadata
-    h8_dic = GetH08Data(H08FilePath)
+    # h8_dic = GetH08Data(H08FilePath)
-    print(h8_dic)
+    # print(h8_dic)
     # Read S2 metadata
-    s2_dic = GetSentinel2Data(S2FilePath)
-    print(s2_dic)
+    # xml_path = r"D:\1Company\Python\RS_data_Dowload\Google_Download_New\data"
+    # thumbnail_path = r"D:\1Company\Python\RS_data_Dowload\Google_Download_New\data"
+    # s2_dic = GetSentinel2Data(S2FilePath, xml_path, thumbnail_path)
+    # print(s2_dic)
     # Read S1 metadata
-    s1_dic = GetSentinel1Data(S1FilePath)
+    # s1_dic = GetSentinel1Data(S1FilePath)
-    print(s1_dic)
+    # print(s1_dic)
     # Read SNPP metadata
-    snpp_dic = GetSNPPData(SNPPFilePath)
+    # snpp_dic = GetSNPPData(SNPPFilePath)
-    print(snpp_dic)
+    # print(snpp_dic)
 
     # Read GF3 metadata
-    gf3_dic = GetGF3MDJData(GF3MDJPath)
+    # gf3_dic = GetGF3MDJData(GF3MDJPath)
-    print(gf3_dic)
+    # print(gf3_dic)
     # Read GF4 metadata
-    gf4_pms_dic, gf4_irs_dic = GetGF4PMIData(GF4PMIPath)
+    # gf4_pms_dic, gf4_irs_dic = GetGF4PMIData(GF4PMIPath)
-    print(gf4_pms_dic)
+    # print(gf4_pms_dic)
-    print(gf4_irs_dic)
+    # print(gf4_irs_dic)
 
     # Read S3 OL metadata
-    s3ol_dic = GetSentinel3OLData(S3OLFilePath)
+    # s3ol_dic = GetSentinel3OLData(S3OLFilePath)
-    print(s3ol_dic)
+    # print(s3ol_dic)
     # # Read S3 SL metadata
     # s3sl_dic = GetSentinel3SLData(S3SLFilePath)
     # print(s3sl_dic)
 
+    # Read S2 metadata
+    S2FilePath = r"F:\test\Sentinel\S2A_MSIL1C_20220102T031131_N0301_R075_T50SLG_20220102T050158.SAFE.zip"
+    xml_path = r"F:\test\Sentinel"
+    thumbnail_path = r"F:\test\Sentinel"
+    s2_dic = GetSentinel2Data(S2FilePath, xml_path, thumbnail_path)
+    print(s2_dic)
+
+    # Read Landsat 8 data
+    LandsatFilePath = r"F:\test\USGS_data\landsat_8_c1\LC81220342021355LGN00.tar.gz"
+    thumbnail_path1 = r"F:\test\USGS_data\landsat_8_c1"
+    txt_path = thumbnail_path1
+    Landsat_dic = GetLandsatData(LandsatFilePath, thumbnail_path1, txt_path)
+    print(Landsat_dic)
+
+    # Read MODIS data
+    MODIS_path = r"F:\test\MODIS\MOD11A1\MOD11A1.A2022038.h26v05.006.2022039112456.hdf"  # MOD11A1 metadata read
+    MODIS = GetModisData(MODIS_path)
+    print(MODIS)
+
+    # Read GOCI data
+    GOCI_FilePath = r"F:\test\GOCI_Data\GOCI_L2\2020-01-01\COMS_GOCI_L2A_GA_20200101001642.CDOM.he5"
+    GOCI = GetGOCIData(GOCI_FilePath)
+    print(GOCI)
+
+    # Read GK2B_GOCI data
+    GK2B_FilePath = r"F:\test\GOCI_Data\GK2_GC2_L2\AC\GK2B_GOCI2_L2_20220208_011530_LA_S010_AC.nc"
+    GK2B = GetGK2BData(GK2B_FilePath)
+    print(GK2B)
dms_client.db (binary)
Binary file not shown.
@@ -1,14 +1,22 @@
 affine==2.3.0
 attrs==21.4.0
 certifi==2021.10.8
-click==8.0.3
+click==8.0.4
 click-plugins==1.1.1
 cligj==0.7.2
 colorama==0.4.4
-GDAL @ file:///F:/Soft/Python%E5%BC%80%E5%8F%91/windows%E9%80%82%E9%85%8D%E5%BA%93/GDAL-3.4.1-cp310-cp310-win_amd64.whl
 numpy==1.22.2
 pyparsing==3.0.7
 pyproj==3.3.0
-rasterio @ file:///F:/Soft/Python%E5%BC%80%E5%8F%91/windows%E9%80%82%E9%85%8D%E5%BA%93/rasterio-1.2.10-cp310-cp310-win_amd64.whl
 six==1.16.0
 snuggs==1.4.7
+flask-cors~=3.0.10
+
+APScheduler~=3.8.1
+Flask~=2.0.3
+Pillow~=9.0.1
+redis~=4.1.4
+h5py~=3.6.0
+requests~=2.27.1
+dict2xml~=1.7.1
+xmltodict~=0.12.0
run.py (new file, +8)
@@ -0,0 +1,8 @@
+from common.config.factory import create_app
+
+app = create_app()
+
+if __name__ == '__main__':
+    print(app.url_map)
+    app.run(host="192.168.2.105", port=9550, debug=True, use_reloader=False)
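use_reloader=False matters here because create_app() starts an APScheduler instance; with the Werkzeug reloader enabled the factory runs in both the parent and the reloaded child process, so the jobs would be scheduled twice. If the reloader is ever re-enabled, one common workaround (a sketch only, not part of this commit) is to start the scheduler only in the reloader's child process:

    import os

    # Sketch: guard scheduler start-up when the Werkzeug reloader is active.
    if os.environ.get('WERKZEUG_RUN_MAIN') == 'true' or not app.debug:
        scheduler.init_app(app)
        scheduler.start()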
Binary file not shown.
Binary file not shown.
Binary file not shown.
(deleted file)
@@ -1,165 +0,0 @@
-from xml.dom import minidom
-from osgeo import gdal
-from PIL import Image
-import numpy as np
-import os
-
-
-def uint16to8(bands, lower_percent=0.001, higher_percent=99.999):
-    """
-    Stretch the image: convert 16-bit to 8-bit
-    :param bands: input raster data
-    :param lower_percent: lower percentile
-    :param higher_percent: upper percentile
-    :return:
-    """
-    out = np.zeros_like(bands, dtype=np.uint8)
-    n = bands.shape[0]
-    for i in range(n):
-        a = 0  # np.min(band)
-        b = 255  # np.max(band)
-        c = np.percentile(bands[i, :, :], lower_percent)
-        d = np.percentile(bands[i, :, :], higher_percent)
-        t = a + (bands[i, :, :] - c) * (b - a) / (d - c)
-        t[t < a] = a
-        t[t > b] = b
-        out[i, :, :] = t
-    return out
-
-
-def createXML(metadata, xlm_file):
-    """
-    Create the XML file and write the dictionary into it
-    :param metadata: metadata information
-    :param xlm_file: XML file
-    :return:
-    """
-    # Create an empty document
-    document = minidom.Document()  # create the DOM document object
-    # Create a root node
-    root = document.createElement('ProductMetaData')
-    # Set attributes on the root node
-    # root.setAttribute('', '')
-    # Append the root node to the document
-    document.appendChild(root)
-    # Convert the dictionary to XML
-    for key in metadata:
-        # Create a parent node
-        node_name = document.createElement(key)
-        # Set the text of the parent node
-        node_name.appendChild(document.createTextNode(str(metadata[key])))
-        # Append each parent node to the root node
-        root.appendChild(node_name)
-    # Write out the XML document
-    with open(xlm_file, 'w', encoding='utf-8') as f:
-        document.writexml(f, indent='\t', newl='\n', addindent='\t', encoding='utf-8')
-        f.close()
-
-
-def GetJPSSData(in_file, xml_path, thumbnail_path):
-    """
-    Get metadata of the Joint Polar Satellite System (JPSS-1): NOAA-20 (Joint Polar Satellite System spacecraft)
-    :param xml_path:
-    :param thumbnail_path:
-    :param in_file:
-    :return: metadata dictionary
-    """
-    try:
-        # Generate the thumbnail
-        in_path, basename = os.path.split(in_file)
-        ThumbnailName = os.path.splitext(basename)[0] + "_thumb.jpg"
-        ThumbnailPath = os.path.join(thumbnail_path, ThumbnailName)
-
-        gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "YES")
-        in_datasets = gdal.Open(in_file)
-        meta_data = in_datasets.GetMetadata()
-        # Fetch the subdatasets
-        datasets = in_datasets.GetSubDatasets()
-        red_data = gdal.Open(datasets[0][0]).ReadAsArray()
-        nir_data = gdal.Open(datasets[3][0]).ReadAsArray()
-        swir_data = gdal.Open(datasets[9][0]).ReadAsArray()
-        img_data = np.array([red_data, nir_data, swir_data])
-        img_data = uint16to8(img_data)
-        # Convert the array to an image
-        img_data2 = np.transpose(img_data, (1, 2, 0))
-        img_data2 = img_data2[:, :, ::-1]
-        img = Image.fromarray(img_data2)
-        # Shrink the image
-        if img_data.shape[1] > img_data.shape[2]:
-            width = 512
-            height = int(width / img_data.shape[1] * img_data.shape[2])
-        else:
-            height = 512
-            width = int(height / img_data.shape[1] * img_data.shape[2])
-        img.thumbnail((width, height))
-        img.save(ThumbnailPath, "PNG")
-
-        # Free memory
-        del in_datasets
-        del img_data
-        del img_data2
-        del img
-
-        # Generate the XML file
-        xmlFileName = os.path.splitext(basename)[0] + ".xml"
-        xmlPath = os.path.join(xml_path, xmlFileName)
-        createXML(meta_data, xmlPath)
-
-        # Product dates
-        ProductionTime = meta_data['ProductionTime']
-        StartTime = meta_data['StartTime']
-        EndTime = meta_data['EndTime']
-
-        # Other fields
-        ImageGSD = str(meta_data['LongName']).split(" ")[-1]
-        Bands = str(meta_data['title']).split(" ")[1]
-
-        # Corner latitude/longitude
-        productUpperLeftLat = meta_data['NorthBoundingCoordinate']  # upper-left latitude
-        productUpperLeftLong = meta_data['WestBoundingCoordinate']  # upper-left longitude
-        productUpperRightLat = meta_data['NorthBoundingCoordinate']  # upper-right latitude
-        productUpperRightLong = meta_data['EastBoundingCoordinate']  # upper-right longitude
-        productLowerLeftLat = meta_data['SouthBoundingCoordinate']  # lower-left latitude
-        productLowerLeftLong = meta_data['WestBoundingCoordinate']  # lower-left longitude
-        productLowerRightLat = meta_data['SouthBoundingCoordinate']  # lower-right latitude
-        productLowerRightLong = meta_data['EastBoundingCoordinate']  # lower-right longitude
-
-        # Boundary geometry
-        boundaryGeomStr = f'POLYGON(({productUpperLeftLong} {productUpperLeftLat},' \
-                          f'{productUpperRightLong} {productUpperRightLat},' \
-                          f'{productLowerRightLong} {productLowerRightLat},' \
-                          f'{productLowerLeftLong} {productLowerLeftLat},' \
-                          f'{productUpperLeftLong} {productUpperLeftLat}))'
-
-        # Build the dictionary
-        jpss_dict = {"ProduceTime": ProductionTime,
-                     "StartTime": StartTime,
-                     "EndTime": EndTime,
-                     "CloudPercent": "",
-                     # "TopLeftLatitude": productUpperLeftLat,
-                     # "TopLeftLongitude": productUpperLeftLong,
-                     # "TopRightLatitude": productUpperRightLat,
-                     # "TopRightLongitude": productUpperRightLong,
-                     # "BottomLeftLatitude": productLowerLeftLat,
-                     # "BottomLeftLongitude": productLowerLeftLong,
-                     # "BottomRightLatitude": productLowerRightLat,
-                     # "BottomRightLongitude": productLowerRightLong,
-                     "boundaryGeomStr": boundaryGeomStr,
-                     "bands": Bands,
-                     "ImageGSD": ImageGSD,
-                     "ProjectedCoordinates": "",
-                     "CollectionCode": "",
-                     "ThumbnailPath": ThumbnailPath,
-                     "ThumbnailName": ThumbnailName,
-                     "xmlPath": xmlPath,
-                     "xmlFileName": xmlFileName,
-                     "DirectoryDepth": "day"}
-
-        # Check whether the dictionary is empty
-        if not jpss_dict:
-            return {"code": -1, "msg": "没有满足条件的数据字典..."}
-        print(jpss_dict)
-        return jpss_dict
-    except Exception as e:
-        print(str(e))
-        return {"code": -1, "msg": str(e)}
(deleted file)
@@ -1,95 +0,0 @@
-#!/usr/bin/python3
-# coding= utf-8
-
-import sqlite3
-
-
-# @param text the text
-# @return True: yes, False: no
-def __string(text):
-    return True if isinstance(text, str) else False
-
-
-# Check whether the text is a float
-# @param text the text
-# @return True: yes, False: no
-def __float(text):
-    if __string(text):
-        try:
-            return True if float("{0}".format(text)) else False
-        except Exception:
-            return False
-    else:
-        return True if isinstance(text, float) else False
-
-
-# Check whether the text is a float
-# @param text the text
-# @return True: yes, False: no
-def __int(text):
-    if __string(text):
-        try:
-            return True if float("{0}".format(text)) else False
-        except Exception:
-            return False
-    else:
-        return True if isinstance(text, float) else False
-
-
-# Check whether the text is numeric
-# @param text the text
-# @return True: yes, False: no
-def __number(text):
-    return True if re.search("[^0-9]", text) == None else False
-
-
-def func_year(s):
-    print('func_year:', s)
-
-
-def func_month(s):
-    print('func_month:', s)
-
-
-def create_data_collection_info_table():
-    con = sqlite3.connect("../dms_client.db")
-    cur = con.cursor()
-    sql = "CREATE TABLE IF NOT EXISTS data_collection_info(id INTEGER PRIMARY KEY,collection_code TEXT,function_name TEXT,describe TEXT)"
-    cur.execute(sql)
-    # 1: insert a single row
-    data = "1,'Desire',5,'test'"
-    cur.execute('INSERT INTO data_collection_info VALUES (%s)' % data)
-    # 2: insert a single row
-    cur.execute("INSERT INTO data_collection_info values(?,?,?,?)", (6, "zgq", 20, 'test'))
-    # 3: insert multiple rows
-    cur.executemany('INSERT INTO data_collection_info VALUES (?,?,?,?)',
-                    [(3, 'name3', 19, 'test'), (4, 'name4', 26, 'test')])
-    cur.execute("UPDATE data_collection_info SET collection_code=? WHERE id=?", ('test1', 19))
-    con.commit()
-    # Close the cursor
-    cur.close()
-    # Close the database connection
-    con.close()
-
-
-# Dynamically call a function
-# @param tag the tag name
-# @param text the text
-# @return True: OK, False: NG
-def item_check(func_name, text):
-    if type and text:
-        try:
-            # Call the function from the imported module and pass the argument
-            return eval("__{0}".format(func_name))(text)
-        except Exception:
-            return False
-    else:
-        return False
-
-
-if __name__ == '__main__':
-    result = item_check("num", 123.23)
-    print(result)
-    strs = ['year', 'month']
-    for s in strs:
-        globals().get('func_%s' % s)(s)
scheduled_task/queue_storage.py (new file, +119)
@@ -0,0 +1,119 @@
+# coding: utf-8
+# Author:tajochen
+
+import time
+
+import requests
+
+from application.settings import Config
+from common.tools.dms import dms_login, dms_task_record
+from util.upload_client import upload_file_client
+
+from util.http_util import httpUtil
+
+
+# Dynamically call a function
+# @param tag the tag name
+# @param text the text
+# @return True: OK, False: NG
+def item_check(func_name, text):
+    if type and text:
+        try:
+            # Call the function from the imported module and pass the argument
+            return eval("__{0}".format(func_name))(text)
+        except Exception:
+            return False
+    else:
+        return False
+
+
+# Dynamically call a remote sensing parser
+# @param tag the tag name
+# @param text the text
+# @return True: OK, False: NG
+def analysis_remote_sensing_data(func_name, collection_code, path):
+    """
+    Parse JPSS VJ102 metadata
+    :return:
+    """
+    print('开始扫描VJ102IMG数据集')
+    # collectionCode = 'VJ102IMG'
+    # Log the user in
+    token_s = dms_login()
+    # Check whether the scheduled task is already running
+    task = dms_task_record(token_s, collection_code)
+    # A non-empty result means a task is in progress
+    if task is not None and len(task) > 0:
+        return 1
+
+    file_total_size = ""
+    file_total_name = ""
+    xmlPath = Config.XML_PATH  # directory where the parsed XML files are saved
+    ThumbnailPath = Config.THUMBNAIL_PATH  # directory where the parsed thumbnails are saved
+
+    # Parse the remote sensing data file (demo)
+    remote_sensing_data_dic = globals().get('func_%s' % func_name)(path, xmlPath, ThumbnailPath)
+
+    # Configure the file server parameters
+    url = Config.DFS_UPLOAD_URL
+    files = {'file': open(remote_sensing_data_dic['xmlPath'], 'rb')}
+    options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collection_code}
+    # Upload the generated XML file to the file server
+    xml = requests.post(url, data=options, files=files)
+
+    url = Config.DFS_UPLOAD_URL
+    files = {'file': open(remote_sensing_data_dic['ThumbnailPath'], 'rb')}
+    options = {'output': 'json', 'path': '/archive_data/remote_sensing_data/' + collection_code}
+    # Upload the generated thumbnail to the file server
+    ThumbnailName = requests.post(url, data=options, files=files)
+
+    CollectionCode = remote_sensing_data_dic['CollectionCode']
+    DirectoryDepth = remote_sensing_data_dic['DirectoryDepth']
+    StartTime = remote_sensing_data_dic['StartTime']
+    # uc = upload_client(path, DirectoryDepth, StartTime[0:19])
+    uc = upload_file_client(path, DirectoryDepth, StartTime[0:19], collection_code)
+
+    StartTime = time.mktime(time.strptime(remote_sensing_data_dic['StartTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+    EndTime = time.mktime(time.strptime(remote_sensing_data_dic['EndTime'][0:19], '%Y-%m-%d %H:%M:%S'))
+
+    # Archive the remote sensing data
+    res_data = httpUtil(url=Config.RESING_DATA_URL,
+                        data={"collectionCode": collection_code, "shootingTimeStartTs": StartTime,
+                              "shootingTimeEndTs": EndTime,
+                              "fileMd5": uc['md5'], "fileName": uc['fileName'], "filePath": uc['file_path'],
+                              "fileSize": uc['file_size'], "cloudCoverage": remote_sensing_data_dic['CloudPercent'],
+                              "metaInformationFile": xml.json()['path'],
+                              "thumbnailFile": ThumbnailName.json()['path'],
+                              "remarks": "", "boundaryGeomStr": remote_sensing_data_dic['boundaryGeomStr']},
+                        token=token_s).post_no_patam_herder()
+    print(res_data.json()['data'])
+
+    file_total_size = file_total_size + str(uc['file_size'])
+    file_total_size = file_total_size + ","
+
+    file_total_name = file_total_name + uc['fileName']
+    file_total_name = file_total_name + ","
+
+    # Add the remote sensing data archiving task
+    res = httpUtil(url=Config.DATA_TASK_URL,
+                   data={"clientCode": "client1", "collectionCode": collection_code, "storageFileList": file_total_name,
+                         "storageFileSizeList": file_total_size, "remarks": ""}, token=token_s).post_no_patam_herder()
+    task_code = res.json()['data']
+
+    # Finish the remote sensing data archiving task
+    header = {"Authorization": token_s}
+    res = requests.post(url=Config.DATA_END_TASK_URL,
+                        params={"taskCode": task_code}, headers=header).json()
+    if res_data.json()['status'] == 0:
+        return 0
+    else:
+        return 2
+
+
+if __name__ == "__main__":
+    result = item_check("num", 123.23)
+    print(result)
+    strs = ['year', 'month']
+    for s in strs:
+        globals().get('func_%s' % s)(s)
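analysis_remote_sensing_data resolves its parser with globals().get('func_%s' % func_name), so it only finds functions defined or imported in this module under a func_ prefix. An explicit registry is a common, easier-to-audit alternative; the sketch below is illustrative only and the parser name in it is a placeholder.

    # Sketch: explicit parser registry instead of a globals() lookup.
    PARSERS = {}

    def register_parser(name):
        def deco(fn):
            PARSERS[name] = fn
            return fn
        return deco

    @register_parser('VJ102IMG')
    def parse_vj102img(path, xml_path, thumbnail_path):
        # Placeholder body for illustration only.
        return {"code": -1, "msg": "not implemented"}

    print(PARSERS['VJ102IMG']('p', 'x', 't'))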
(deleted file)
@@ -1,5 +0,0 @@
-"""
-Author : XinYi Song
-Time : 2021/12/3 11:17
-Desc:
-"""
Binary file not shown.
Binary file not shown.
@@ -4,12 +4,17 @@
 import socket
 import sys
 import struct
+import json
 
 # Local host information
+import time
+
+from util.simple_sqlite3_tool import SimpleSQLite3Tool
+
 host_ip = socket.gethostbyname(socket.gethostname())
 # Multicast group IP and port
-mcast_group_ip = '239.255.255.252'
+mcast_group_ip = '224.1.1.1'
-mcast_group_port = 5678
+mcast_group_port = 2234
 
 
 def receiver():
@@ -34,6 +39,23 @@ def receiver():
     while True:
         try:
             data, address = sock.recvfrom(4096)
+            sql_tool = SimpleSQLite3Tool("../dms_client.db")
+            data2 = json.loads(data)
+            # If the multicast payload is not valid
+            if len(data2) < 3:
+                return
+            # If the record already exists
+            res1 = sql_tool.query("select * from data_list where collection_code=? and file_name=?;", (data2.collectionCode, data2.filename))
+            if len(res1) != 0:
+                return
+            print(res1)
+            # Store the archived record
+            current_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
+            res2 = sql_tool.execute("insert into data_list "
+                                    "(file_path,file_name,collection_code,archive_status,created_time) values (?,?,?,?,?);",
+                                    (data2.file_path, data2.file_name, data2.collection_code, 0, current_time))
+            print(res2)
+            sql_tool.close()
         except socket.error as e:
             print(f"while receive message error occur:{e}")
         else:
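One thing to watch in the new receiver block: json.loads(data) returns a plain dict, so attribute access such as data2.collectionCode or data2.file_path will raise AttributeError at runtime, and the key names used in the two statements differ (collectionCode/filename versus file_name/collection_code). Key access with consistent names is what the lookups appear to intend; a hedged sketch, reusing the surrounding data and sql_tool variables:

    # Sketch: dictionary key access for the decoded multicast payload.
    data2 = json.loads(data)
    res1 = sql_tool.query(
        "select * from data_list where collection_code=? and file_name=?;",
        (data2["collection_code"], data2["file_name"]))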
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
(deleted file)
@@ -1,84 +0,0 @@
-"""
-Author : XinYi Song
-Time : 2021/11/4 9:27
-Desc:
-"""
-import datetime
-import os
-import time
-
-
-def file_store_path(time_stamp):
-    """
-    :param time_stamp: timestamp-typed time
-    :return:
-    """
-    now = int(round(time_stamp * 1000))
-    t = time.localtime(now / 1000)
-    return os.path.join('E:/data/upload', str(t[0]), str(t[1]), str(t[2]))
-
-
-def file_store_path_year(data_str_time, upload_path, conllection_code):
-    """
-    Directory down to the year
-    :param upload_path:
-    :param data_str_time: string-typed time
-    :return:
-    """
-    t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
-    return os.path.join(upload_path, conllection_code, str(t[0]))
-
-
-def file_store_path_month(data_str_time, upload_path, conllection_code):
-    """
-    Directory down to the month
-    :param upload_path:
-    :param data_str_time:
-    :return:
-    """
-    t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
-    return os.path.join(upload_path, conllection_code, str(t[0]), str(t[1]))
-
-
-def file_store_path_day(data_str_time, upload_path, conllection_code):
-    """
-    Directory down to the day
-    :param upload_path:
-    :param data_str_time: string-typed time
-    :return:
-    """
-    t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
-    return os.path.join(upload_path, conllection_code, str(t[0]), str(t[1]), str(t[2]))
-
-
-if __name__ == '__main__':
-    # time_stamp1 = time.time()
-    # print(time_stamp1)
-    str_time = '2020-06-08 09:33:07'
-    t = time.strptime(str_time, '%Y-%m-%d %H:%M:%S')
-    # path = os.path.join('../upload', str(t[0]), str(t[1]), str(t[2]))
-    # if not os.path.exists(path):
-    #     os.makedirs(path)
-    # print(path)
-    # time_stamp = float(time_stamp1)
-    # now = int(round(time_stamp * 1000))
-    # t = time.localtime(now / 1000)
-    # print(t)
-
-    # list1 = ['张三', '李四']
-    # token_s = dms_login()
-    # dms_list = dms_sensing_data(token_s)
-    # Values returned from the database
-    # list2 = ['张三', '李四']
-
-    # d = [y for y in list2 if y not in list1]
-    # if d is None or len(d) == 0:
-    #     print("d为空")
-    # else:
-    #     print(d)
-    # file_dir = 'C:/Users/HP/Desktop/数管/'
-    # dir_list = os.listdir(file_dir)
-    # print(dir_list)
-    # timestring = '2016-12-21 10:22:56'
-    # print(time.mktime(time.strptime(timestring, '%Y-%m-%d %H:%M:%S')))  # 1482286976.0
-    print(t)
util/file_util.py (1050)
File diff suppressed because it is too large. Load Diff
(deleted file)
@@ -1,81 +0,0 @@
-import exifread
-import re
-import json
-import requests
-
-
-def latitude_and_longitude_convert_to_decimal_system(*arg):
-    """
-    Convert latitude/longitude to a decimal value, param arg:
-    :return: decimal degrees
-    """
-    return float(arg[0]) + ((float(arg[1]) + (float(arg[2].split('/')[0]) / float(arg[2].split('/')[-1]) / 60)) / 60)
-
-
-def find_GPS_image(pic_path):
-    GPS = {}
-    date = ''
-    with open(pic_path, 'rb') as f:
-        tags = exifread.process_file(f)
-        for tag, value in tags.items():
-            if re.match('GPS GPSLatitudeRef', tag):
-                GPS['GPSLatitudeRef'] = str(value)
-            elif re.match('GPS GPSLongitudeRef', tag):
-                GPS['GPSLongitudeRef'] = str(value)
-            elif re.match('GPS GPSAltitudeRef', tag):
-                GPS['GPSAltitudeRef'] = str(value)
-            elif re.match('GPS GPSLatitude', tag):
-                try:
-                    match_result = re.match('\[(\w*),(\w*),(\w.*)/(\w.*)\]', str(value)).groups()
-                    GPS['GPSLatitude'] = int(match_result[0]), int(match_result[1]), int(match_result[2])
-                except:
-                    deg, min, sec = [x.replace(' ', '') for x in str(value)[1:-1].split(',')]
-                    GPS['GPSLatitude'] = latitude_and_longitude_convert_to_decimal_system(deg, min, sec)
-            elif re.match('GPS GPSLongitude', tag):
-                try:
-                    match_result = re.match('\[(\w*),(\w*),(\w.*)/(\w.*)\]', str(value)).groups()
-                    GPS['GPSLongitude'] = int(match_result[0]), int(match_result[1]), int(match_result[2])
-                except:
-                    deg, min, sec = [x.replace(' ', '') for x in str(value)[1:-1].split(',')]
-                    GPS['GPSLongitude'] = latitude_and_longitude_convert_to_decimal_system(deg, min, sec)
-            elif re.match('GPS GPSAltitude', tag):
-                GPS['GPSAltitude'] = str(value)
-            elif re.match('.*Date.*', tag):
-                date = str(value)
-    return {'GPS_information': GPS, 'date_information': date}
-
-
-def find_address_from_GPS(GPS):
-    """
-    Use the Geocoding API to convert coordinates into a structured address.
-    :param GPS:
-    :return:
-    """
-    secret_key = 'zbLsuDDL4CS2U0M4KezOZZbGUY9iWtVf'
-    if not GPS['GPS_information']:
-        return '该照片无GPS信息'
-    lat, lng = GPS['GPS_information']['GPSLatitude'], GPS['GPS_information']['GPSLongitude']
-    baidu_map_api = "http://api.map.baidu.com/geocoder/v2/?ak={0}&callback=renderReverse&location={1},{2}s&output=json&pois=0".format(
-        secret_key, lat, lng)
-    response = requests.get(baidu_map_api)
-    content = response.text.replace("renderReverse&&renderReverse(", "")[:-1]
-    baidu_map_address = json.loads(content)
-    formatted_address = baidu_map_address["result"]["formatted_address"]
-    province = baidu_map_address["result"]["addressComponent"]["province"]
-    city = baidu_map_address["result"]["addressComponent"]["city"]
-    district = baidu_map_address["result"]["addressComponent"]["district"]
-    return formatted_address, province, city, district
-
-
-pic_path = 'D:\pythonjob\pic-time&location\DJI_0001.jpg'
-
-GPS_info = find_GPS_image(pic_path)
-address = find_address_from_GPS(GPS=GPS_info)
-# print(GPS_info)
-# print(address)
-
-x = list(GPS_info.values())
-# print(x)
-time = x[1]
-gps_dict_formate = x[0]
-y = list(gps_dict_formate.values())
-information = '拍照时间:' + time + ',拍照地点:' + str(address[0]) + '(经度:' + str(y[2]) + ' ' + str(y[3]) + ',纬度:' + str(y[0]) + ' ' + str(y[1]) + ',高度:' + str(y[5]) + '米)'
-
-print(pic_path)
-print(information)
@ -1,86 +0,0 @@
"""
Author : XinYi Song
Time : 2021/11/3 14:29
Desc:
"""
from util.file_store_path import file_store_path_day, file_store_path_year, file_store_path_month

"""
Implements resumable ("breakpoint") file upload.
"""
import sys
import os
from hashlib import md5

FILE_DIR = os.path.dirname(__file__)

BASE_DIR = os.path.dirname(os.path.dirname(__file__))
home = os.path.join(BASE_DIR, "E:/data/upload")


# helper that renders a simple progress bar
def bar(num=1, sum=100):
    rate = float(num) / float(sum)
    rate_num = int(rate * 100)
    temp = '\r%d %%' % rate_num
    sys.stdout.write(temp)


def md5_file(name):
    m = md5()
    a_file = open(name, 'rb')  # the file content must be read in binary mode
    m.update(a_file.read())
    a_file.close()
    return m.hexdigest()
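md5_file above reads the entire file into memory before hashing. A minimal alternative sketch (not part of this commit) that hashes in fixed-size chunks, keeping memory use flat for large remote-sensing products, might look like:

def md5_file_chunked(name, chunk_size=1024 * 1024):
    # hypothetical helper, not in this file: hash the file 1 MiB at a time
    m = md5()
    with open(name, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            m.update(chunk)
    return m.hexdigest()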


def upload_client(local_path, depth, dateTime, conllection_code):
    global file_path
    while True:
        file_byte_size = os.stat(local_path).st_size  # size of the file in bytes
        file_name = os.path.basename(local_path)  # file name
        md5 = md5_file(local_path)

        has_sent = 0
        file_obj = open(local_path, 'rb')  # open the source file for reading
        file_obj.seek(has_sent)  # move the read pointer
        if depth == 'year':
            file_path = file_store_path_year(dateTime, home, conllection_code)
            if not os.path.exists(file_path):
                os.makedirs(file_path)
        if depth == 'month':
            file_path = file_store_path_month(dateTime, home, conllection_code)
            if not os.path.exists(file_path):
                os.makedirs(file_path)
        if depth == 'day':
            file_path = file_store_path_day(dateTime, home, conllection_code)
            if not os.path.exists(file_path):
                os.makedirs(file_path)
        path = os.path.join(file_path, file_name)
        has_received = 0

        # first check whether a file already exists at the target path
        if os.path.exists(path):
            f = open(path, 'wb')
        else:
            f = open(path, 'wb')

        while has_sent < file_byte_size:
            # read a block of data
            data = file_obj.read(1024)
            try:
                # write the block out
                f.write(data)
                has_received += len(data)
                if not data:
                    raise Exception
            except Exception:
                flag = False
                break
            has_sent += len(data)
            bar(has_sent, file_byte_size)  # progress bar
        print("文件上传成功!")
        file_obj.close()
        f.close()
        file_dict = {'fileName': file_name, 'md5': md5, 'file_size': file_byte_size, 'file_path': file_path, 'type': 'ok'}
        return file_dict
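Note that although the module docstring promises resumable upload, upload_client always starts from has_sent = 0 and opens the target with 'wb', which truncates any partial copy. A minimal sketch of how a resume offset could be derived instead (hypothetical, not part of this file):

def resume_offset(target_path):
    # hypothetical helper: continue from however many bytes already reached the target
    return os.path.getsize(target_path) if os.path.exists(target_path) else 0

# inside upload_client this would replace the fixed starting point:
#     has_sent = resume_offset(path)
#     file_obj.seek(has_sent)
#     f = open(path, 'ab')  # append instead of truncating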
@ -1,91 +0,0 @@
"""
Author : XinYi Song
Time : 2021/11/4 16:59
Desc:
"""
import rasterio
import requests

from util.xml_util import xml_to_dict, dict_to_xml


def gf4_pmi_001(file_name, xml_name):
    """

    :param file_name: path of the remote-sensing source file handed over by the file scanner
    :param xmlPath: path where the parsed xml file is stored
    :param ThumbnailPath: path where the extracted thumbnail is stored
    :return:
    """
    file_path = 'E:/sensing/GF4_PMI_001/'
    with rasterio.open(file_path + file_name, 'r') as ds:
        # folder holding the xml and thumbnail: the root folder plus a folder named after the collection code
        print('该栅格数据的基本数据集信息:')
        CollectionCode = 'GF4_PMI_001'  # collection code
        DataFormat = ds.driver  # DataFormat: data format
        NumberBands = ds.count  # NumberBands: number of bands
        ImageWidth = ds.width  # ImageWidth: image width
        ImageHeight = ds.height  # ImageHeight: image height
        GeographicScope = ds.bounds  # GeographicScope: geographic extent
        ReflectionParameter = ds.transform  # ReflectionParameter: affine transform parameters (six-parameter model)
        ProjectionDefinition = ds.crs  # ProjectionDefinition: projection definition
        # print(CRS.from_epsg(4326))
        # read the first band; as with GDAL, band indexing starts at 1
        # read(1) returns a 2-D numpy.ndarray; calling read() with no argument returns all bands (the first dimension is the band)
        band1 = ds.read(1)
        FirstBindMax = band1.max()  # FirstBindMax: maximum of the first band

        FirstBindMin = band1.min()  # FirstBindMin: minimum of the first band
        FirstBindAverage = band1.mean()  # FirstBindAverage: mean of the first band
        # derive row/column indices from projected coordinates
        x, y = (ds.bounds.left + 300, ds.bounds.top - 300)  # projected coordinates 300 m east and 300 m south of the top-left corner
        row, col = ds.index(x, y)  # corresponding row/column indices
        print(f'(投影坐标{x}, {y})对应的行列号是({row}, {col})')
        ProjectedCoordinates = x, y  # ProjectedCoordinates: projected coordinates
        RowNumber = row, col  # RowNumber: corresponding row/column indices
        # derive projected coordinates from row/column indices
        x, y = ds.xy(row, col)  # coordinates of the cell centre
        print(f'行列号({row}, {col})对应的中心投影坐标是({x}, {y})')
        CenterProjectionCoordinates = x, y  # CenterProjectionCoordinates: centre projected coordinates
        # 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
        # parse the xml file at the given path
        # xml_name: path + file name of the stored xml file
        xml_dict = xml_to_dict(file_path + xml_name)
        StartTime = xml_dict['ProductMetaData']['StartTime']  # acquisition start time
        EndTime = xml_dict['ProductMetaData']['EndTime']  # acquisition end time
        CloudPercent = xml_dict['ProductMetaData']['CloudPercent']  # cloud cover percentage
        TopLeftLatitude = xml_dict['ProductMetaData']['TopLeftLatitude']  # top-left latitude
        TopLeftLongitude = xml_dict['ProductMetaData']['TopLeftLongitude']  # top-left longitude
        TopRightLatitude = xml_dict['ProductMetaData']['TopRightLatitude']  # top-right latitude
        TopRightLongitude = xml_dict['ProductMetaData']['TopRightLongitude']  # top-right longitude
        BottomRightLatitude = xml_dict['ProductMetaData']['BottomRightLatitude']  # bottom-right latitude
        BottomRightLongitude = xml_dict['ProductMetaData']['BottomRightLongitude']  # bottom-right longitude
        BottomLeftLatitude = xml_dict['ProductMetaData']['BottomLeftLatitude']  # bottom-left latitude
        BottomLeftLongitude = xml_dict['ProductMetaData']['BottomLeftLongitude']  # bottom-left longitude
        boundaryGeomStr = f'POLYGON(({TopLeftLatitude} {TopLeftLongitude},' \
                          f'{TopRightLatitude} {TopRightLongitude},' \
                          f'{BottomRightLatitude} {BottomRightLongitude},' \
                          f'{BottomLeftLatitude} {BottomLeftLongitude},' \
                          f'{TopLeftLatitude} {TopLeftLongitude}))'
        # ThumbnailPath: path + file name of the stored thumbnail; ThumbnailName: thumbnail file name
        # xmlPath: path + file name of the stored xml file; xmlFileName: xml file name
        sensing_dict = {'StartTime': StartTime, 'EndTime': EndTime, 'CloudPercent': CloudPercent,
                        'boundaryGeomStr': boundaryGeomStr, 'DataFormat': DataFormat, 'NumberBands': NumberBands,
                        'ImageWidth': ImageWidth, 'ImageHeight': ImageHeight, 'GeographicScope': GeographicScope,
                        # 'ReflectionParameter': ReflectionParameter, 'ProjectionDefinition': ProjectionDefinition,
                        # 'FirstBindMax': FirstBindMax, 'FirstBindMin': FirstBindMin,
                        'FirstBindAverage': FirstBindAverage,
                        'ProjectedCoordinates': ProjectedCoordinates, 'RowNumber': RowNumber,
                        # 'CenterProjectionCoordinates': CenterProjectionCoordinates,
                        'CollectionCode': CollectionCode,
                        "ThumbnailPath": file_path + "GF4_IRS_E119.8_N35.3_20210908_L1A0000417337_thumb.jpg",
                        "ThumbnailName": "GF4_IRS_E119.8_N35.3_20210908_L1A0000417337_thumb.jpg",
                        "xmlPath": "", "xmlFileName": "",
                        'DirectoryDepth': 'day'}
        return sensing_dict


if __name__ == '__main__':
    file_path = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.tiff'
    xml_path = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
    gf4_pmi_001(file_path, xml_path)
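gf4_pmi_001 relies on xml_to_dict from util.xml_util, which is not shown in this diff. Assuming it simply maps the product XML into nested dictionaries, and assuming the product XML's root element is ProductMetaData (as the accesses above suggest), a rough stand-in built on the third-party xmltodict package could be:

import xmltodict

def xml_to_dict_sketch(xml_path):
    # hypothetical stand-in for util.xml_util.xml_to_dict; the real helper may differ
    with open(xml_path, 'r', encoding='utf-8') as f:
        return xmltodict.parse(f.read())

# usage matching the access pattern above:
# meta = xml_to_dict_sketch(file_path + xml_name)
# start_time = meta['ProductMetaData']['StartTime']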
File diff suppressed because it is too large
@ -1,6 +1,5 @@
 # coding: utf-8
-# Author:boxker
-# Mail:icjb@foxmail.com
+# Author:tajochen

 import sqlite3
 import os
@ -13,79 +12,84 @@ class SimpleSQLite3Tool:
     This class is mainly a thin wrapper around sqlite; inherit from it to reuse these methods
     """

     def __init__(self, filename="stsql"):
         """
         Initialize the database; the default file name is stsql.db
         filename: database file name
         """
         self.filename = filename
         self.db = sqlite3.connect(self.filename)
         self.c = self.db.cursor()

     def close(self):
         """
         Close the database
         """
         self.c.close()
         self.db.close()

     def execute(self, sql, param=None):
         """
         Run an insert, delete or update statement
         sql: SQL statement
         param: data; may be a list, a tuple, or None
-        retutn: returns True on success
+        return: returns True on success
         """
         try:
             if param is None:
                 self.c.execute(sql)
             else:
                 if type(param) is list:
                     self.c.executemany(sql, param)
                 else:
                     self.c.execute(sql, param)
             count = self.db.total_changes
             self.db.commit()
         except Exception as e:
             print(e)
             return False, e
         if count > 0:
             return True
         else:
             return False

     def query(self, sql, param=None):
         """
         Run a query
         sql: SQL statement
         param: parameters; may be None
-        retutn: returns True on success
+        return: returns True on success
         """
         if param is None:
             self.c.execute(sql)
         else:
             self.c.execute(sql, param)
         return self.c.fetchall()

     # def set(self,table,field=" * ",where="",isWhere=False):
     #     self.table = table
     #     self.filed = field
     #     if where != "" :
     #         self.where = where
     #         self.isWhere = True
     #     return True


 if __name__ == "__main__":
     # location of the database file
-    sql = SimpleSQLite3Tool("../dms_client.db")
-    # f = sql.execute("create table test (id int not null,name text not null,age int);")
-    # print("ok")
-    # sql.execute("insert into test (id,name,age) values (?,?,?);", [(1, 'abc', 15), (2, 'bca', 16)])
-    # res = sql.query("select * from test;")
-    # print(res)
-    # sql.execute("insert into test (id,name) values (?,?);", (3, 'bac'))
-    # res = sql.query("select * from test where id=?;", (3,))
+    sql = SimpleSQLite3Tool("../test.db")
+    f = sql.execute("create table test (id int not null,name text not null,age int);")
+    print("ok")
+    sql.execute("insert into test (id,name,age) values (?,?,?);", [(1, 'abc', 15), (2, 'bca', 16)])
+    res = sql.query("select * from test;")
+    print(res)
+    sql.execute("insert into test (id,name) values (?,?);", (3, 'bac'))
+    res = sql.query("select * from test where id=?;", (3,))
     res = sql.query("select * from data_collection_info;")
     print(res)
     sql.close()
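Because SimpleSQLite3Tool exposes a close() method, callers can also wrap it in contextlib.closing so the connection is released even when a statement raises. A small usage sketch, not part of this commit and assuming the test table created above already exists:

from contextlib import closing

with closing(SimpleSQLite3Tool("../test.db")) as db:
    db.execute("insert into test (id,name,age) values (?,?,?);", (4, 'cab', 17))
    print(db.query("select * from test where id=?;", (4,)))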
@ -8,8 +8,7 @@ import sys
 import re
 import os
 from hashlib import md5
-
-from util.file_store_path import file_store_path_year, file_store_path_month, file_store_path_day
+import time

 FILE_DIR = os.path.dirname(__file__)

@ -26,6 +25,27 @@ def bar(num=1, sum=100):
     sys.stdout.write(temp)


+def file_store_path(time_stamp):
+    """
+    :param time_stamp: time as a numeric timestamp
+    :return:
+    """
+    now = int(round(time_stamp * 1000))
+    t = time.localtime(now / 1000)
+    return os.path.join('E:/data/upload', str(t[0]), str(t[1]), str(t[2]))
+
+
+def file_store_path_year(data_str_time, upload_path, conllection_code):
+    """
+    Directory down to the year
+    :param upload_path:
+    :param data_str_time: time as a string
+    :return:
+    """
+    t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
+    return os.path.join(upload_path, conllection_code, str(t[0]))
+
+
 def md5_file(name):
     m = md5()
     a_file = open(name, 'rb')  # the file content must be read in binary mode
@ -34,6 +54,28 @@ def md5_file(name):
     return m.hexdigest()


+def file_store_path_month(data_str_time, upload_path, conllection_code):
+    """
+    Directory down to the month
+    :param upload_path:
+    :param data_str_time:
+    :return:
+    """
+    t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
+    return os.path.join(upload_path, conllection_code, str(t[0]), str(t[1]))
+
+
+def file_store_path_day(data_str_time, upload_path, conllection_code):
+    """
+    Directory down to the day
+    :param upload_path:
+    :param data_str_time: time as a string
+    :return:
+    """
+    t = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
+    return os.path.join(upload_path, conllection_code, str(t[0]), str(t[1]), str(t[2]))
+
+
 def upload_file_client(file_path, depth, dateTime, collectionCode):
     ck = socket.socket()
     ck.connect(('192.168.2.9', 9002))
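The three file_store_path_* helpers added above differ only in how deep the date hierarchy goes. As a quick illustrative check (not part of the commit, with made-up argument values):

base, code, ts = 'E:/data/upload', 'GF4_PMI_001', '2021-11-23 09:47:00'
print(file_store_path_year(ts, base, code))   # E:/data/upload/GF4_PMI_001/2021 (separator depends on the OS)
print(file_store_path_month(ts, base, code))  # .../GF4_PMI_001/2021/11
print(file_store_path_day(ts, base, code))    # .../GF4_PMI_001/2021/11/23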
78
util/zxby.py
@ -1,78 +0,0 @@
"""
Author : XinYi Song
Time : 2021/10/9 9:43
Desc:
"""
import os, sys, subprocess, tempfile, time

# create a temporary directory and keep its path
TempFile = tempfile.mkdtemp(suffix='_test', prefix='python_')
# file name (millisecond timestamp)
FileNum = int(time.time() * 1000)
# path of the Python interpreter
EXEC = sys.executable


# get the Python version
def get_version():
    v = sys.version_info
    version = "python %s.%s" % (v.major, v.minor)
    return version


# build the name of the .py file
def get_pyname():
    global FileNum
    return 'test_%d' % FileNum


# write the received code into a file
def write_file(pyname, code):
    fpath = os.path.join(TempFile, '%s.py' % pyname)
    with open(fpath, 'w', encoding='utf-8') as f:
        f.write(code)
    print('file path: %s' % fpath)
    return fpath


# decode subprocess output
def decode(s):
    try:
        return s.decode('utf-8')
    except UnicodeDecodeError:
        return s.decode('gbk')


# main entry point
def main(code):
    r = dict()
    r["version"] = get_version()
    pyname = get_pyname()
    fpath = write_file(pyname, code)
    try:
        # subprocess.check_output waits for the child process and returns its standard output
        # stderr is redirected into that output
        outdata = decode(subprocess.check_output([EXEC, fpath], stderr=subprocess.STDOUT, timeout=5))
    except subprocess.CalledProcessError as e:
        # e.output holds the captured output of the failed run
        # data returned on error
        r["code"] = 'Error'
        r["output"] = decode(e.output)
        return r
    else:
        # data returned on success
        r['output'] = outdata
        return r
    finally:
        # delete the file (strictly speaking unnecessary; the temporary files get cleaned up automatically)
        try:
            os.remove(fpath)
        except Exception as e:
            exit(1)


# if __name__ == '__main__':
#     code = "print(11);print(22)"
#     print(main(code))
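The commented-out block above hints at how main() is meant to be driven; an expanded usage sketch (hypothetical input values) showing both the success and the error branch:

ok = main("print(11); print(22)")
print(ok["version"], ok["output"])  # e.g. "python 3.8" with "11\n22\n" captured

bad = main("print(1 / 0)")
print(bad.get("code"))              # 'Error' because the child process exited non-zero
print(bad["output"])                # the captured ZeroDivisionError traceback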