1、搭建flask框架。
2、整合JPSS,葵花8,GF3,哨兵1,哨兵2,哨兵3,资源2号,环境1号,SNPP等遥感数据解析算法。 3、flask中添加扫描各个卫星扫描任务,定时扫描,数据入库
This commit is contained in:
0
util/__init__.py
Normal file
0
util/__init__.py
Normal file
BIN
util/__pycache__/__init__.cpython-39.pyc
Normal file
BIN
util/__pycache__/__init__.cpython-39.pyc
Normal file
Binary file not shown.
BIN
util/__pycache__/file_store_path.cpython-39.pyc
Normal file
BIN
util/__pycache__/file_store_path.cpython-39.pyc
Normal file
Binary file not shown.
BIN
util/__pycache__/http_file_upload.cpython-39.pyc
Normal file
BIN
util/__pycache__/http_file_upload.cpython-39.pyc
Normal file
Binary file not shown.
BIN
util/__pycache__/http_util.cpython-39.pyc
Normal file
BIN
util/__pycache__/http_util.cpython-39.pyc
Normal file
Binary file not shown.
BIN
util/__pycache__/remote_sensing_util.cpython-39.pyc
Normal file
BIN
util/__pycache__/remote_sensing_util.cpython-39.pyc
Normal file
Binary file not shown.
BIN
util/__pycache__/scan_file_util.cpython-39.pyc
Normal file
BIN
util/__pycache__/scan_file_util.cpython-39.pyc
Normal file
Binary file not shown.
BIN
util/__pycache__/xml_util.cpython-39.pyc
Normal file
BIN
util/__pycache__/xml_util.cpython-39.pyc
Normal file
Binary file not shown.
42
util/copy_util.py
Normal file
42
util/copy_util.py
Normal file
@ -0,0 +1,42 @@
|
||||
"""
|
||||
Author : XinYi Song
|
||||
Time : 2021/10/13 10:13
|
||||
Desc: 复制文件
|
||||
"""
|
||||
import os
|
||||
from shutil import copy
|
||||
|
||||
|
||||
# Copy one file into a target directory, creating the directory when missing.
def copyToDir(from_path, to_path):
    """Copy the file at *from_path* into directory *to_path*.

    :param from_path: path of the source file
    :param to_path: destination directory (created if it does not exist)
    """
    os.makedirs(to_path, exist_ok=True)  # no-op when the directory already exists
    copy(from_path, to_path)
|
||||
|
||||
|
||||
# Copy every regular file from one directory into another (flat, non-recursive).
def copyToDirAll(path, path_two):
    """Copy all entries of directory *path* into existing directory *path_two*.

    Prints a per-file success message; prints a warning and does nothing when
    either argument is not a directory (both must already exist).

    :param path: source directory
    :param path_two: destination directory
    :return: None
    """
    if os.path.isdir(path) and os.path.isdir(path_two):
        for name in os.listdir(path):
            src = os.path.join(path, name)
            dst = os.path.join(path_two, name)
            # BUG FIX: the original rebound its listdir variable as the output
            # file handle inside the loop and read each whole file into memory.
            # Copy in bounded chunks with both handles managed by `with`.
            with open(src, "rb") as src_f, open(dst, "wb") as dst_f:
                while chunk := src_f.read(1024 * 1024):
                    dst_f.write(chunk)
            print("{}复制成功".format(name))
    else:
        print("不是文件夹 ")
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Ad-hoc manual run: mirror D:/img into D:/image.
    source_dir, target_dir = 'D:/img', 'D:/image'
    copyToDirAll(source_dir, target_dir)
|
84
util/file_store_path.py
Normal file
84
util/file_store_path.py
Normal file
@ -0,0 +1,84 @@
|
||||
"""
|
||||
Author : XinYi Song
|
||||
Time : 2021/11/4 9:27
|
||||
Desc:
|
||||
"""
|
||||
import datetime
|
||||
import os
|
||||
import time
|
||||
|
||||
|
||||
def file_store_path(time_stamp, upload_path='E:/data/upload'):
    """Build a <root>/<year>/<month>/<day> storage path from a unix timestamp.

    Generalized: the previously hard-coded root is now a defaulted parameter,
    keeping existing single-argument callers working.

    :param time_stamp: timestamp in seconds (float, as from ``time.time()``)
    :param upload_path: root directory of the storage tree
    :return: joined path string (directories are NOT created here)
    """
    # Round-trip through milliseconds preserves the original's rounding behavior.
    now = int(round(time_stamp * 1000))
    t = time.localtime(now / 1000)
    return os.path.join(upload_path, str(t[0]), str(t[1]), str(t[2]))
|
||||
|
||||
|
||||
def file_store_path_year(data_str_time, upload_path):
    """Return <upload_path>/<year> for a 'YYYY-mm-dd HH:MM:SS' time string.

    :param data_str_time: time as a string
    :param upload_path: root directory of the storage tree
    :return: joined path string
    """
    parsed = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
    return os.path.join(upload_path, str(parsed.tm_year))
|
||||
|
||||
|
||||
def file_store_path_month(data_str_time, upload_path):
    """Return <upload_path>/<year>/<month> for a 'YYYY-mm-dd HH:MM:SS' string.

    Month is not zero-padded (e.g. '6' for June), matching ``str(tm_mon)``.

    :param data_str_time: time as a string
    :param upload_path: root directory of the storage tree
    :return: joined path string
    """
    parsed = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
    return os.path.join(upload_path, str(parsed.tm_year), str(parsed.tm_mon))
|
||||
|
||||
|
||||
def file_store_path_day(data_str_time, upload_path):
    """Return <upload_path>/<year>/<month>/<day> for a 'YYYY-mm-dd HH:MM:SS' string.

    Month/day are not zero-padded, matching ``str(tm_mon)`` / ``str(tm_mday)``.

    :param data_str_time: time as a string
    :param upload_path: root directory of the storage tree
    :return: joined path string
    """
    parsed = time.strptime(data_str_time, '%Y-%m-%d %H:%M:%S')
    return os.path.join(upload_path, str(parsed.tm_year), str(parsed.tm_mon), str(parsed.tm_mday))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Ad-hoc check: parse a sample timestamp string and show the struct_time.
    sample = '2020-06-08 09:33:07'
    t = time.strptime(sample, '%Y-%m-%d %H:%M:%S')
    print(t)
|
86
util/http_file_upload.py
Normal file
86
util/http_file_upload.py
Normal file
@ -0,0 +1,86 @@
|
||||
"""
|
||||
Author : XinYi Song
|
||||
Time : 2021/11/3 14:29
|
||||
Desc:
|
||||
"""
|
||||
from util.file_store_path import file_store_path_day, file_store_path_year, file_store_path_month
|
||||
|
||||
"""
|
||||
实现文件断点续传
|
||||
"""
|
||||
import sys
|
||||
import os
|
||||
from hashlib import md5
|
||||
|
||||
# Directory containing this module.
FILE_DIR = os.path.dirname(__file__)

# Parent of this module's directory (project root).
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Root of the upload storage tree.
# NOTE(review): joining BASE_DIR with a drive-letter path only yields an
# absolute path on Windows; on POSIX this becomes a relative "E:/..." subdir — confirm intent.
home = os.path.join(BASE_DIR, "E:/data/upload")
|
||||
|
||||
|
||||
# Render a simple in-place percentage progress indicator on stdout.
def bar(num=1, sum=100):
    """Write '\\r<percent> %' to stdout, where percent = num / sum * 100, truncated.

    :param num: work completed so far
    :param sum: total amount of work (parameter name kept for caller compatibility)
    """
    percent = int(float(num) / float(sum) * 100)
    sys.stdout.write('\r%d %%' % percent)
|
||||
|
||||
|
||||
def md5_file(name):
    """Return the hex MD5 digest of the file at *name*.

    :param name: path of the file to hash
    :return: 32-char lowercase hex digest string
    """
    m = md5()
    # `with` guarantees the handle is closed even on a read error (the original
    # used a bare open/close pair); chunked reads avoid loading large files whole.
    with open(name, 'rb') as a_file:  # binary mode: hash the raw bytes
        for chunk in iter(lambda: a_file.read(1024 * 1024), b''):
            m.update(chunk)
    return m.hexdigest()
|
||||
|
||||
|
||||
def upload_client(local_path, depth, dateTime):
    """Copy *local_path* into the date-partitioned storage tree under ``home``.

    Shows a progress bar while copying and returns a summary dict.

    :param local_path: path of the source file
    :param depth: partition depth, one of 'year', 'month', 'day'
    :param dateTime: 'YYYY-mm-dd HH:MM:SS' string selecting the target directory
    :return: dict with keys fileName, md5, file_size, file_path, type
    :raises ValueError: if *depth* is not a supported value (the original left a
        stale/undefined ``global file_path`` in that case)
    """
    file_byte_size = os.stat(local_path).st_size  # size of the source file
    file_name = os.path.basename(local_path)
    file_md5 = md5_file(local_path)  # renamed: the original shadowed the md5 import

    # Map each supported depth to its path-builder (replaces three if-blocks).
    depth_resolvers = {
        'year': file_store_path_year,
        'month': file_store_path_month,
        'day': file_store_path_day,
    }
    try:
        file_path = depth_resolvers[depth](dateTime, home)
    except KeyError:
        raise ValueError("depth must be 'year', 'month' or 'day': %r" % (depth,))
    if not os.path.exists(file_path):
        os.makedirs(file_path)

    path = os.path.join(file_path, file_name)
    has_sent = 0
    # The original wrapped this in a dead `while True` (it always returned on the
    # first pass) and leaked both handles; copy chunk-by-chunk under `with`.
    with open(local_path, 'rb') as src, open(path, 'wb') as dst:
        while True:
            data = src.read(1024)
            if not data:
                break
            dst.write(data)
            has_sent += len(data)
            bar(has_sent, file_byte_size)  # progress bar
    print("文件上传成功!")
    return {'fileName': file_name, 'md5': file_md5, 'file_size': file_byte_size,
            'file_path': file_path, 'type': 'ok'}
|
158
util/http_util.py
Normal file
158
util/http_util.py
Normal file
@ -0,0 +1,158 @@
|
||||
# 导入requests包
|
||||
import json
|
||||
import os
|
||||
from typing import Dict
|
||||
|
||||
import requests
|
||||
from requests_toolbelt import MultipartEncoder
|
||||
|
||||
|
||||
class httpUtil(object):
    """Thin convenience wrapper around ``requests`` for this project's REST calls.

    Construction is keyword-only: a target ``url`` plus optional query
    ``params``, JSON ``data`` body, ``file_path`` for uploads and an
    ``Authorization`` ``token``. (Misspelled method names are kept for
    compatibility with existing callers.)
    """

    def __init__(self, *, url: str, params: Dict = None, data: Dict = None, file_path: str = None, token: str = None):
        self.url = url
        self.params = params
        self.file_path = file_path
        self.data = data
        self.token = token

    def get_no_param(self):
        """GET with no query parameters.

        :return: the ``requests`` Response
        """
        res = requests.get(url=self.url)
        return res

    def get(self):
        """GET with ``self.params`` as the query string.

        :return: the Response, or -1 if the request raised
        """
        try:
            res = requests.get(url=self.url, params=self.params)
            return res
        except Exception:  # narrowed from a bare except (no longer eats KeyboardInterrupt)
            return -1

    def get_herder(self):
        """GET with query parameters and an ``Authorization`` header.

        :return: the Response, or -1 if the request raised
        """
        try:
            # Dropped a stray "" literal the original implicitly concatenated
            # into the "Authorization" key (harmless but confusing).
            headers = {
                "Authorization": self.token
            }
            res = requests.get(url=self.url, params=self.params, headers=headers)
            return res
        except Exception:
            return -1

    def post_no_patam(self):
        """POST with ``self.data`` serialized to a JSON string as the body.

        :return: the Response
        """
        # requests accepts dict or str for data; serialize so the body is JSON text.
        data = json.dumps(self.data)
        res = requests.post(url=self.url, data=data)
        return res

    def post_no_patam_herder(self):
        """POST a JSON body with Content-Type and Authorization headers.

        :return: the Response
        """
        headers = {
            "Content-Type": "application/json",
            "Authorization": self.token
        }
        res = requests.post(url=self.url, data=json.dumps(self.data), headers=headers)
        return res

    def post_patam_herder(self):
        """POST with query parameters and an Authorization header (no body).

        BUG FIX: the original passed ``param=`` — an unknown keyword argument
        that made ``requests.post`` raise TypeError — instead of ``params=``.

        :return: the Response
        """
        headers = {
            "Authorization": self.token
        }
        res = requests.post(url=self.url, params=self.params, headers=headers)
        return res

    def post(self):
        """POST a JSON body together with query parameters.

        :return: the Response
        """
        res = requests.post(url=self.url, data=json.dumps(self.data), params=self.params)
        return res

    def post_param(self):
        """POST with query parameters only.

        :return: the Response
        """
        res = requests.post(url=self.url, params=self.params)
        return res

    def post_file(self):
        """POST ``self.file_path`` as multipart form data.

        :return: the Response
        """
        filepath, filename = os.path.split(self.file_path)
        # Keep the handle open only for the duration of the upload
        # (the original opened it without ever closing it).
        with open(self.file_path, 'rb') as fh:
            m = MultipartEncoder({'file': (filename, fh)})
            headers = {
                "Content-Type": m.content_type,
                "other-keys": "other-values"
            }
            res = requests.post(url=self.url, data=m, params=self.params, headers=headers)
        return res
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual integration check against a hard-coded LAN service:
    # log in, then look up dictionary metadata for collection GF4-0001.
    res = httpUtil(url='http://192.168.2.105:8820/api/login',
                   params={"userName": "client1", "password": "sxy1998"}).post_param()
    print(res.json()['data'])

    # The login endpoint returns the auth token in its 'data' field.
    token_s = res.json()['data']

    res3 = httpUtil(url='http://192.168.2.105:8820/api/dic-remote-sensing-data/get/code',
                    params={"code": "GF4-0001"}, token=token_s).get_herder()
    print(res3.json())
|
44
util/json_util.py
Normal file
44
util/json_util.py
Normal file
@ -0,0 +1,44 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
|
||||
def write_info(file_name, file_info):
    """Serialize *file_info* as pretty-printed JSON to '<file_name>.json'.

    :param file_name: destination path WITHOUT the '.json' suffix
    :param file_info: JSON-serializable object to write
    """
    directory = os.path.dirname(file_name)  # renamed: `dir` shadowed the builtin
    # BUG FIX: guard against '' — for a bare filename os.path.dirname returns ''
    # and os.makedirs('') raises FileNotFoundError.
    if directory and not os.path.exists(directory):
        os.makedirs(directory)
    with open('{}.json'.format(file_name), 'w', encoding='UTF-8') as fp:
        json.dump(file_info, fp, indent=4, sort_keys=False)
|
||||
|
||||
|
||||
def read_json(file_path):
    """Load and return the JSON document stored at *file_path*.

    :param file_path: path of the JSON file
    :return: the deserialized object
    """
    # Explicit UTF-8, matching write_info's encoding (was locale-dependent).
    with open(file_path, 'r', encoding='UTF-8') as f:
        data = json.load(f)
    return data
|
||||
|
||||
|
||||
# Sample annotation payload (three polygon labels on one image) used for manual
# testing of write_info/read_json below; not referenced by the functions above.
report_data = {"project_no": "628740635893760", "img_path": "2000.png",
               "create_time": "2021-06-10T11:17:12.202000+00:00", "labels": [
        {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
         "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
         "point": [{"x": 114.04715127701375, "y": 53.04518664047151}, {"x": 196.2671905697446, "y": 53.04518664047151},
                   {"x": 196.2671905697446, "y": 149.4106090373281},
                   {"x": 114.04715127701375, "y": 149.4106090373281}]},
        {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
         "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
         "point": [{"x": 284.67583497053045, "y": 64.53831041257367}, {"x": 401.3752455795678, "y": 64.53831041257367},
                   {"x": 401.3752455795678, "y": 266.1100196463654},
                   {"x": 284.67583497053045, "y": 266.1100196463654}]},
        {"shape": "polygon", "name": "a", "line_width": 2, "width": 750.0, "height": 788.0, "comment": "string",
         "color": "rgb(255, 0, 123)", "is_match": "False", "score": 0.0,
         "point": [{"x": 501.2770137524558, "y": 148.52652259332024}, {"x": 623.2809430255403, "y": 148.52652259332024},
                   {"x": 623.2809430255403, "y": 320.0392927308448},
                   {"x": 501.2770137524558, "y": 320.0392927308448}]}]}
|
||||
|
||||
if __name__ == "__main__":
    # Manual sanity check: print the absolute directory of this module.
    module_dir = os.path.dirname(__file__)
    print(os.path.abspath(module_dir))
|
16
util/md5_util.py
Normal file
16
util/md5_util.py
Normal file
@ -0,0 +1,16 @@
|
||||
import hashlib
|
||||
|
||||
|
||||
class Md5Util(object):
    """Salted MD5 helper: hashes the salt followed by the password."""

    def __init__(self, *, salt: str, password: str):
        self.salt = salt          # prefix mixed into the digest
        self.password = password  # secret to hash

    def md5(self):
        """Return the hex MD5 digest of salt + password (both utf-8 encoded)."""
        digest = hashlib.md5(self.salt.encode('utf-8'))  # seed with the salt
        digest.update(self.password.encode('utf-8'))     # then fold in the password
        return digest.hexdigest()
|
91
util/remote_sensing_util.py
Normal file
91
util/remote_sensing_util.py
Normal file
@ -0,0 +1,91 @@
|
||||
"""
|
||||
Author : XinYi Song
|
||||
Time : 2021/11/4 16:59
|
||||
Desc:
|
||||
"""
|
||||
import rasterio
|
||||
import requests
|
||||
|
||||
from util.xml_util import xml_to_dict, dict_to_xml
|
||||
|
||||
|
||||
def gf4_pmi_001(file_name, xml_name):
    """Parse one GF4_PMI_001 scene into a flat metadata dict for ingestion.

    Reads raster properties via rasterio and product metadata from the
    companion XML (via xml_to_dict), and builds a WKT footprint string.

    :param file_name: name of the source image, appended to the hard-coded dataset root
    :param xml_name: name of the product-metadata XML, appended to the same root
    :return: dict of scene metadata (times, footprint WKT, raster stats, thumbnail/xml info)
    """
    # NOTE(review): dataset root is hard-coded; file_name/xml_name are joined onto it.
    file_path = 'E:/sensing/GF4_PMI_001/'
    with rasterio.open(file_path+file_name, 'r') as ds:
        # Basic dataset information of this raster.
        print('该栅格数据的基本数据集信息:')
        CollectionCode = 'GF4_PMI_001'  # collection (dataset) code
        DataFormat = ds.driver  # raster data format
        NumberBands = ds.count  # number of bands
        ImageWidth = ds.width  # image width in pixels
        ImageHeight = ds.height  # image height in pixels
        GeographicScope = ds.bounds  # geographic extent
        ReflectionParameter = ds.transform  # affine transform (six-parameter model)
        ProjectionDefinition = ds.crs  # projection definition
        # print(CRS.from_epsg(4326))
        # Band indexing starts at 1, as in GDAL.
        # read(1) returns the band as a 2-D numpy.ndarray; read() with no
        # arguments would return all bands (band as the first dimension).
        band1 = ds.read(1)
        FirstBindMax = band1.max()  # maximum value of band 1

        FirstBindMin = band1.min()  # minimum value of band 1
        FirstBindAverage = band1.mean()  # mean value of band 1
        # Row/column from a projected coordinate:
        x, y = (ds.bounds.left + 300, ds.bounds.top - 300)  # 300 m east / 300 m south of the top-left corner
        row, col = ds.index(x, y)  # corresponding row/column indices
        print(f'(投影坐标{x}, {y})对应的行列号是({row}, {col})')
        ProjectedCoordinates = x, y  # projected coordinates
        RowNumber = row, col  # corresponding row/column indices
        # Projected coordinate back from row/column:
        x, y = ds.xy(row, col)  # center coordinate of that pixel
        print(f'行列号({row}, {col})对应的中心投影坐标是({x}, {y})')  #
        CenterProjectionCoordinates = x, y  # center projection coordinate
        # Parse the product XML (path + file name of the stored xml) into a dict.
        xml_dict = xml_to_dict(file_path+xml_name)
        StartTime = xml_dict['ProductMetaData']['StartTime']  # acquisition start time
        EndTime = xml_dict['ProductMetaData']['EndTime']  # acquisition end time
        CloudPercent = xml_dict['ProductMetaData']['CloudPercent']  # cloud cover percentage
        TopLeftLatitude = xml_dict['ProductMetaData']['TopLeftLatitude']  # top-left latitude
        TopLeftLongitude = xml_dict['ProductMetaData']['TopLeftLongitude']  # top-left longitude
        TopRightLatitude = xml_dict['ProductMetaData']['TopRightLatitude']  # top-right latitude
        TopRightLongitude = xml_dict['ProductMetaData']['TopRightLongitude']  # top-right longitude
        BottomRightLatitude = xml_dict['ProductMetaData']['BottomRightLatitude']  # bottom-right latitude
        BottomRightLongitude = xml_dict['ProductMetaData']['BottomRightLongitude']  # bottom-right longitude
        BottomLeftLatitude = xml_dict['ProductMetaData']['BottomLeftLatitude']  # bottom-left latitude
        BottomLeftLongitude = xml_dict['ProductMetaData']['BottomLeftLongitude']  # bottom-left longitude
        # Closed-ring footprint; NOTE(review): vertices are ordered "lat lon",
        # while WKT is conventionally "x y" (lon lat) — confirm what downstream expects.
        boundaryGeomStr = f'POLYGON(({TopLeftLatitude} {TopLeftLongitude},' \
                          f'{TopRightLatitude} {TopRightLongitude},' \
                          f'{BottomRightLatitude} {BottomRightLongitude},' \
                          f'{BottomLeftLatitude} {BottomLeftLongitude},' \
                          f'{TopLeftLatitude} {TopLeftLongitude}))'
        # ThumbnailPath: stored thumbnail path + file name; ThumbnailName: thumbnail file name.
        # xmlPath: stored xml path + file name; xmlFileName: xml file name.
        sensing_dict = {'StartTime': StartTime, 'EndTime': EndTime, 'CloudPercent': CloudPercent,
                        'boundaryGeomStr': boundaryGeomStr, 'DataFormat': DataFormat, 'NumberBands': NumberBands,
                        'ImageWidth': ImageWidth, 'ImageHeight': ImageHeight, 'GeographicScope': GeographicScope,
                        #'ReflectionParameter': ReflectionParameter, 'ProjectionDefinition': ProjectionDefinition,
                        #'FirstBindMax': FirstBindMax, 'FirstBindMin': FirstBindMin,
                        'FirstBindAverage': FirstBindAverage,
                        'ProjectedCoordinates': ProjectedCoordinates, 'RowNumber': RowNumber,
                        #'CenterProjectionCoordinates': CenterProjectionCoordinates,
                        'CollectionCode': CollectionCode,
                        "ThumbnailPath": file_path+"GF4_IRS_E119.8_N35.3_20210908_L1A0000417337_thumb.jpg",
                        "ThumbnailName": "GF4_IRS_E119.8_N35.3_20210908_L1A0000417337_thumb.jpg",
                        "xmlPath": "", "xmlFileName": "",
                        'DirectoryDepth': 'day'}
        return sensing_dict
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual smoke test against a locally downloaded GF4 scene.
    # NOTE(review): gf4_pmi_001 prepends its own hard-coded root to these
    # arguments, so passing absolute paths double-prefixes them — confirm intent.
    file_path = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.tiff'
    xml_path = 'C:/Users/HP/Desktop/Number tube/GF4_PMI_E119.8_N35.3_20210908_L1A0000417337/GF4_PMS_E119.8_N35.3_20210908_L1A0000417337.xml'
    gf4_pmi_001(file_path, xml_path)
|
1195
util/scan_file_util.py
Normal file
1195
util/scan_file_util.py
Normal file
File diff suppressed because it is too large
Load Diff
103
util/snow_ari.py
Normal file
103
util/snow_ari.py
Normal file
@ -0,0 +1,103 @@
|
||||
import sys
|
||||
import time
|
||||
import logging
|
||||
|
||||
|
||||
class MySnow(object):
    """Snowflake-style 64-bit ID generator.

    Layout (low to high): 12-bit per-millisecond sequence | 5-bit worker id |
    5-bit datacenter id | millisecond timestamp relative to a 2021-06-09 epoch.
    """

    def __init__(self, datacenter_id, worker_id):
        """
        :param datacenter_id: datacenter id, 0 ~ 31
        :param worker_id: worker id, 0 ~ 31
        """
        # Epoch in milliseconds (2021-06-09) that timestamps are measured from.
        self.initial_time_stamp = int(time.mktime(time.strptime('2021-06-09 00:00:00', "%Y-%m-%d %H:%M:%S")) * 1000)
        # Bits reserved for the worker id.
        self.worker_id_bits = 5
        # Bits reserved for the datacenter id.
        self.datacenter_id_bits = 5
        # Largest worker id = 31 (shift trick: max value representable in n bits).
        self.max_worker_id = -1 ^ (-1 << self.worker_id_bits)
        # Largest datacenter id = 31.
        self.max_datacenter_id = -1 ^ (-1 << self.datacenter_id_bits)
        # Bits reserved for the per-millisecond sequence.
        self.sequence_bits = 12
        # Worker id offset (12).
        self.workerid_offset = self.sequence_bits
        # Datacenter id offset (12 + 5).
        self.datacenterid_offset = self.sequence_bits + self.datacenter_id_bits
        # Timestamp offset (12 + 5 + 5).
        self.timestamp_offset = self.sequence_bits + self.datacenter_id_bits + self.worker_id_bits
        # Sequence mask, 4095 (0b111111111111 = 0xfff).
        self.sequence_mask = -1 ^ (-1 << self.sequence_bits)

        # Logger for validation / clock problems.
        self.logger = logging.getLogger('snowflake')

        # Validate datacenter id (0 ~ 31).
        if datacenter_id > self.max_datacenter_id or datacenter_id < 0:
            # BUG FIX: the original interpolated max_worker_id here by mistake
            # (same numeric value, but the wrong attribute).
            err_msg = 'datacenter_id 不能大于 %d 或小于 0' % self.max_datacenter_id
            self.logger.error(err_msg)
            sys.exit()
        self.datacenter_id = datacenter_id
        # Validate worker id (0 ~ 31).
        if worker_id > self.max_worker_id or worker_id < 0:
            err_msg = 'worker_id 不能大于 %d 或小于 0' % self.max_worker_id
            self.logger.error(err_msg)
            sys.exit()
        self.worker_id = worker_id
        # Sequence within the current millisecond (0 ~ 4095).
        self.sequence = 0
        # Timestamp of the last generated id.
        self.last_timestamp = -1

    def _gen_timestamp(self):
        """Return the current time as an integer millisecond timestamp."""
        return int(time.time() * 1000)

    def next_id(self):
        """Return the next snowflake id.

        NOTE(review): despite the original comment about a "sync lock", no lock
        is taken here — this is NOT thread-safe as written.

        :return: the new 64-bit id (int)
        """
        timestamp = self._gen_timestamp()
        # Clock moved backwards: only logged (generation continues, so ids may
        # collide until the clock catches up).
        if timestamp < self.last_timestamp:
            self.logger.error('clock is moving backwards. Rejecting requests until {}'.format(self.last_timestamp))
        if timestamp == self.last_timestamp:
            # Same millisecond as the previous id: advance the sequence.
            self.sequence = (self.sequence + 1) & self.sequence_mask
            # Sequence wrapped to 0: this millisecond is exhausted.
            if self.sequence == 0:
                # Busy-wait for the next millisecond and use its timestamp.
                timestamp = self._til_next_millis(self.last_timestamp)
        else:
            # New millisecond: reset the sequence.
            self.sequence = 0

        # Remember when this id was generated.
        self.last_timestamp = timestamp

        # Shift each field into place and OR them into one 64-bit id.
        new_id = ((timestamp - self.initial_time_stamp) << self.timestamp_offset) | \
                 (self.datacenter_id << self.datacenterid_offset) | \
                 (self.worker_id << self.workerid_offset) | \
                 self.sequence
        return new_id

    def _til_next_millis(self, last_timestamp):
        """Busy-wait until the clock passes *last_timestamp*.

        :param last_timestamp: millisecond timestamp of the last generated id
        :return: the current millisecond timestamp (now > last_timestamp)
        """
        timestamp = self._gen_timestamp()
        while timestamp <= last_timestamp:
            timestamp = self._gen_timestamp()
        return timestamp
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Generate and print one snowflake id (datacenter 1, worker 2).
    generator = MySnow(1, 2)
    print(generator.next_id())
|
50
util/xml_util.py
Normal file
50
util/xml_util.py
Normal file
@ -0,0 +1,50 @@
|
||||
"""
|
||||
Author : XinYi Song
|
||||
Time : 2021/10/12 11:13
|
||||
Desc: xml工具类
|
||||
"""
|
||||
from xml.dom.minidom import parseString
|
||||
import dict2xml
|
||||
import xmltodict
|
||||
import json
|
||||
import os
|
||||
|
||||
|
||||
# Normalize input: accepts 1) an xml file path 2) an xml string 3) a json string 4) a dict.
def init_data(file_Str_Dict):
    """Return the raw data behind *file_Str_Dict*.

    A path to an existing file yields the file's text; any other string or a
    dict is returned unchanged; anything else falls through and returns None.

    :param file_Str_Dict: file path, xml/json string, or dict
    :return: file contents, the input itself, or None
    """
    if isinstance(file_Str_Dict, str) and os.path.isfile(file_Str_Dict):
        # Explicit utf-8 (the original used the locale default encoding).
        with open(file_Str_Dict, encoding='utf-8') as fp:
            data = fp.read()
        return data
    elif isinstance(file_Str_Dict, (str, dict)):
        data = file_Str_Dict
        return data
    # Unsupported types implicitly return None (original behavior preserved).
|
||||
|
||||
|
||||
# Parse xml (file path or string) into a plain nested dict.
def xml_to_dict(file_Str_Dict):
    """Convert xml — given as a file path or raw string — to a plain dict.

    :param file_Str_Dict: xml file path or xml string (normalized by init_data)
    :return: nested dict mirroring the xml structure
    """
    raw = init_data(file_Str_Dict)
    # xmltodict yields OrderedDicts; round-trip through JSON to get plain
    # json-compatible dicts, exactly as the original did.
    ordered = xmltodict.parse(raw)
    return json.loads(json.dumps(ordered, indent=4))
|
||||
|
||||
|
||||
# Serialize a dict to a pretty-printed xml file.
def dict_to_xml(dict_in, xml_out):
    """Write *dict_in* as pretty-printed xml (wrapped in an <xml> root) to *xml_out*.

    :param dict_in: dict to serialize
    :param xml_out: destination file path
    """
    xml_str = dict2xml.dict2xml(dict_in)
    xml_raw = '<?xml version="1.0" encoding="utf-8"?>\n' + '<xml>\n' + xml_str + '\n</xml>'
    # Strip newlines before parsing so toprettyxml doesn't emit blank lines.
    dom = parseString(xml_raw.replace('\n', ''))
    pretty = dom.toprettyxml(indent=" ", newl="\n", encoding="utf-8")
    # Explicit utf-8 (was locale-dependent); `with` closes the handle — the
    # original's extra f.close() inside the block was redundant.
    with open(xml_out, 'w', encoding='utf-8') as f:
        f.write(pretty.decode("utf-8"))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Manual check: parse a sample xml file and show the resulting dict.
    parsed = xml_to_dict("E:/xmltest/demo.xml")
    print(parsed)
|
78
util/zxby.py
Normal file
78
util/zxby.py
Normal file
@ -0,0 +1,78 @@
|
||||
"""
|
||||
Author : XinYi Song
|
||||
Time : 2021/10/9 9:43
|
||||
Desc:
|
||||
"""
|
||||
import os, sys, subprocess, tempfile, time
|
||||
|
||||
# Temp directory holding generated scratch scripts (created at import time).
TempFile = tempfile.mkdtemp(suffix='_test', prefix='python_')
# Millisecond timestamp used to build a unique scratch-script name.
FileNum = int(time.time() * 1000)
# Path of the running Python interpreter.
EXEC = sys.executable


def get_version():
    """Return the interpreter version as 'python <major>.<minor>'."""
    v = sys.version_info
    version = "python %s.%s" % (v.major, v.minor)
    return version


def get_pyname():
    """Return the scratch module name derived from FileNum."""
    global FileNum
    return 'test_%d' % FileNum


def write_file(pyname, code):
    """Write *code* into '<TempFile>/<pyname>.py' and return the path."""
    fpath = os.path.join(TempFile, '%s.py' % pyname)
    with open(fpath, 'w', encoding='utf-8') as f:
        f.write(code)
    print('file path: %s' % fpath)
    return fpath


def decode(s):
    """Decode subprocess output bytes: prefer utf-8, fall back to gbk."""
    try:
        return s.decode('utf-8')
    except UnicodeDecodeError:
        return s.decode('gbk')


def main(code):
    """Run *code* in a fresh Python subprocess and return the result dict.

    :param code: Python source to execute
    :return: {'version': ..., 'output': child stdout+stderr} on success; on a
        non-zero exit additionally 'code': 'Error' with the child's output.
    """
    r = dict()
    r["version"] = get_version()
    pyname = get_pyname()
    fpath = write_file(pyname, code)
    try:
        # check_output waits for the child and captures stdout (stderr merged).
        outdata = decode(subprocess.check_output([EXEC, fpath], stderr=subprocess.STDOUT, timeout=5))
    except subprocess.CalledProcessError as e:
        # Child exited non-zero: e.output carries its diagnostic output.
        r["code"] = 'Error'
        r["output"] = decode(e.output)
        return r
    else:
        r['output'] = outdata
        return r
    finally:
        # Best-effort cleanup of the scratch file.
        # BUG FIX: the original called exit(1) here on any cleanup failure,
        # killing the whole process over a non-fatal error.
        try:
            os.remove(fpath)
        except OSError:
            pass
|
||||
|
||||
|
||||
# if __name__ == '__main__':
|
||||
# code = "print(11);print(22)"
|
||||
# print(main(code))
|
||||
|
Reference in New Issue
Block a user