This commit is contained in:
yooooger 2025-07-31 15:56:12 +08:00
parent 40209e160b
commit 8e2c1b8654
3 changed files with 2 additions and 370 deletions

View File

@@ -2,14 +2,14 @@ import psycopg2
from datetime import datetime
from miniohelp import *
def insert_data(table_name, data):
def insert_data(table_name, data, table = 'danger'):
"""
PostgreSQL 数据库的指定表中插入数据
:param table_name: 表名
:param data: 字典格式的数据列名:
"""
# Validate that the table name is allowed
if table_name not in ['danger']: # add your table names here
if table_name not in [table]: # add your table names here
print(f"Error: invalid table name {table_name}")
return

View File

@@ -1,234 +0,0 @@
import os
import logging
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.colors import LinearSegmentedColormap
from shapely.geometry import Polygon, Point
from tqdm import tqdm
from py3dtiles.tileset import TileSet
import requests
# Logging configuration
logger = logging.getLogger("TilesetProcessor")
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
class TilesetProcessor:
"""3D Tiles数据集处理器用于加载、分析和比较两个3D Tiles模型"""
def __init__(self, tileset_path1, tileset_path2, resolution=1.0, polygon_points=None):
self.tileset1 = self._load_tileset(tileset_path1)
self.tileset2 = self._load_tileset(tileset_path2)
self.resolution = resolution
self.analysis_area = None
self.height_difference_grid = None
self.grid_bounds = None
if polygon_points:
self.set_analysis_area(polygon_points=polygon_points)
def _load_tileset(self, path_or_url):
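# Load a tileset.json from a local path or an HTTP(S) URL and return a TileSet object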
try:
logger.info(f"加载3D Tiles数据集: {path_or_url}")
if path_or_url.startswith("http://") or path_or_url.startswith("https://"):
resp = requests.get(path_or_url)
resp.raise_for_status()
tileset_json = resp.json()
tileset = TileSet.from_dict(tileset_json)
else:
tileset = TileSet.from_file(path_or_url)
logger.info(f"成功加载,包含 {len(tileset.root.children)} 个根瓦片")
return tileset
except Exception as e:
logger.error(f"加载数据集失败(路径: {path_or_url}: {e}")
raise
def set_analysis_area(self, bounds=None, polygon_points=None):
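# Define the analysis area either from an arbitrary polygon or from an axis-aligned rectangle (min_x, min_y, max_x, max_y)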
if polygon_points:
self.analysis_area = Polygon(polygon_points)
min_x = min(p[0] for p in polygon_points)
min_y = min(p[1] for p in polygon_points)
max_x = max(p[0] for p in polygon_points)
max_y = max(p[1] for p in polygon_points)
self.grid_bounds = (min_x, min_y, max_x, max_y)
logger.info(f"设置多边形分析区域: {polygon_points}")
elif bounds:
min_x, min_y, max_x, max_y = bounds
self.analysis_area = Polygon([
(min_x, min_y),
(max_x, min_y),
(max_x, max_y),
(min_x, max_y)
])
self.grid_bounds = bounds
logger.info(f"设置矩形分析区域: {bounds}")
else:
logger.error("请提供 bounds 或 polygon_points")
return False
logger.info(f"分析区域面积: {self.analysis_area.area:.2f} 平方米")
return True
def sample_heights(self):
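# Sample both tilesets on a regular grid over the analysis area and store the per-cell height difference (tileset2 - tileset1)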
if self.analysis_area is None:
logger.error("请先设置分析区域")
return False
logger.info("开始在分析区域内采样高度值...")
min_x, min_y, max_x, max_y = self.grid_bounds
rows = int((max_y - min_y) / self.resolution) + 1
cols = int((max_x - min_x) / self.resolution) + 1
self.height_difference_grid = np.full((rows, cols), np.nan, dtype=np.float32)
total_points = rows * cols
logger.info(f"创建了 {rows}x{cols}={total_points} 个采样点")
with tqdm(total=total_points, desc="Sampling height points") as pbar:
for i in range(rows):
for j in range(cols):
x = min_x + j * self.resolution
y = min_y + i * self.resolution
point = Point(x, y)
if not self.analysis_area.contains(point):
pbar.update(1)
continue
height1 = self._sample_height_at_point(self.tileset1, x, y)
height2 = self._sample_height_at_point(self.tileset2, x, y)
if height1 is not None and height2 is not None:
self.height_difference_grid[i, j] = height2 - height1
pbar.update(1)
valid_differences = self.height_difference_grid[~np.isnan(self.height_difference_grid)]
if len(valid_differences) > 0:
logger.info("高度变化统计:")
logger.info(f" 平均变化: {np.mean(valid_differences):.2f}m")
logger.info(f" 最大上升: {np.max(valid_differences):.2f}m")
logger.info(f" 最大下降: {np.min(valid_differences):.2f}m")
logger.info(f" 变化标准差: {np.std(valid_differences):.2f}m")
else:
logger.warning("未找到有效的高度差异数据")
return True
def _sample_height_at_point(self, tileset, x, y, max_depth=3):
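# Walk the tile tree to find a tile whose bounding box contains (x, y), then return a (mocked) height for it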
def find_tile(tile, depth=0):
bbox = tile.bounding_volume.box
min_x_tile = bbox[0] - bbox[3]
max_x_tile = bbox[0] + bbox[3]
min_y_tile = bbox[1] - bbox[4]
max_y_tile = bbox[1] + bbox[4]
if not (min_x_tile <= x <= max_x_tile and min_y_tile <= y <= max_y_tile):
return None
if (tile.content is not None and depth >= max_depth) or not tile.children:
return tile
for child in tile.children:
result = find_tile(child, depth + 1)
if result is not None:
return result
return None
tile = find_tile(tileset.root)
if tile is None or tile.content is None:
return None
try:
# Simplified mock: return the tile's center height plus a random offset
return tile.bounding_volume.box[2] + np.random.uniform(-0.5, 0.5)
except Exception as e:
logger.warning(f"获取瓦片高度失败: {e}")
return None
def export_results(self, output_dir="results"):
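# Write the valid grid cells to CSV and render a heatmap of the height differences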
if self.height_difference_grid is None:
logger.error("请先采样高度值")
return
os.makedirs(output_dir, exist_ok=True)
csv_path = os.path.join(output_dir, "height_differences.csv")
logger.info(f"导出CSV文件: {csv_path}")
min_x, min_y, max_x, max_y = self.grid_bounds
rows, cols = self.height_difference_grid.shape
data = []
for i in range(rows):
for j in range(cols):
if not np.isnan(self.height_difference_grid[i, j]):
x = min_x + j * self.resolution
y = min_y + i * self.resolution
data.append({
'x': x,
'y': y,
'height_difference': self.height_difference_grid[i, j]
})
df = pd.DataFrame(data)
df.to_csv(csv_path, index=False)
self._generate_heatmap(output_dir)
logger.info(f"结果已导出到 {output_dir} 目录")
def _generate_heatmap(self, output_dir):
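# Blue-to-white-to-red colormap for visualizing the height differences over the sampling grid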
colors = [(0.0, 0.0, 1.0), (1.0, 1.0, 1.0), (1.0, 0.0, 0.0)]
cmap = LinearSegmentedColormap.from_list('height_diff_cmap', colors, N=256)
data = self.height_difference_grid.copy()
valid_mask = ~np.isnan(data)
if not np.any(valid_mask):
logger.warning("没有有效的高度差异数据,无法生成热图")
return
data[~valid_mask] = 0
plt.figure(figsize=(12, 10))
plt.imshow(data, cmap=cmap, origin='lower',
extent=[self.grid_bounds[0], self.grid_bounds[2],
self.grid_bounds[1], self.grid_bounds[3]],
alpha=0.9)
cbar = plt.colorbar()
cbar.set_label('Height change (m)', fontsize=12)
plt.title('Height change distribution between the two 3D Tiles models', fontsize=16)
plt.xlabel('X coordinate (m)', fontsize=12)
plt.ylabel('Y coordinate (m)', fontsize=12)
heatmap_path = os.path.join(output_dir, "height_difference_heatmap.png")
plt.savefig(heatmap_path, dpi=300, bbox_inches='tight')
plt.close()
logger.info(f"热图已保存到: {heatmap_path}")
if __name__ == "__main__":
tileset1_url = "http://8.137.54.85:9000/300bdf2b-a150-406e-be63-d28bd29b409f/dszh/1748398014403562192_OUT/B3DM/tileset.json"
tileset2_url = "http://8.137.54.85:9000/300bdf2b-a150-406e-be63-d28bd29b409f/dszh/1748325943733189898_OUT/B3DM/tileset.json"
polygon_coords = [
(102.2232, 29.3841),
(102.2261, 29.3845),
(102.2263, 29.3821),
(102.2231, 29.3818)
]
resolution = 0.5
output_dir = "output_results"
processor = TilesetProcessor(tileset1_url, tileset2_url, resolution, polygon_coords)
if processor.sample_heights():
processor.export_results(output_dir)
print("分析完成!结果已导出到指定目录。")
else:
print("高度采样失败,无法完成分析。")

View File

@@ -1,134 +0,0 @@
import os
import json
import numpy as np
import requests
from urllib.parse import urlparse, urljoin
from pathlib import Path
from pyproj import Transformer
from py3dtiles.tileset.content import B3dm
from py3dtiles.tilers.b3dm.wkb_utils import TriangleSoup
CACHE_DIR = "./_cache_tiles"
os.makedirs(CACHE_DIR, exist_ok=True)
def download_file(url, cache_dir=CACHE_DIR):
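# Download a remote file into the local cache directory, skipping files that are already cached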
os.makedirs(cache_dir, exist_ok=True)
filename = os.path.basename(urlparse(url).path)
local_path = os.path.join(cache_dir, filename)
if not os.path.exists(local_path):
print(f"📥 Downloading {url}")
r = requests.get(url)
r.raise_for_status()
with open(local_path, "wb") as f:
f.write(r.content)
return local_path
def apply_transform(points, transform):
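# Apply a 4x4 homogeneous transform to an Nx3 array of points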
n = points.shape[0]
homo = np.hstack([points, np.ones((n,1))])
transformed = (transform @ homo.T).T[:, :3]
return transformed
def extract_positions_from_b3dm(b3dm_bytes, transform=None):
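# Parse the glb payload bytes into a TriangleSoup and return its vertex positions, optionally transformed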
soup = TriangleSoup.from_glb_bytes(b3dm_bytes)
positions = soup.get_positions() # Nx3 numpy
if transform is not None:
positions = apply_transform(positions, transform)
return positions
def load_tileset_json(path_or_url):
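# Load a tileset.json (downloading it first if given a URL) and return (json, base directory, base URL or None)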
if path_or_url.startswith("http"):
local_path = download_file(path_or_url)
else:
local_path = path_or_url
with open(local_path, "r", encoding="utf-8") as f:
js = json.load(f)
if "geometricError" not in js:
print("⚠️ tileset.json缺少geometricError自动补 1000.0")
js["geometricError"] = 1000.0
return js, os.path.dirname(local_path), path_or_url if path_or_url.startswith("http") else None
def parse_transform(tile_json):
# Returns a 4x4 np.array
if "transform" in tile_json:
return np.array(tile_json["transform"]).reshape(4,4)
else:
return np.eye(4)
def recursive_load(tile_json, base_dir, base_url, bbox, transformer, transform_accum=None):
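# Recursively walk the tileset, accumulating tile transforms, and collect all b3dm vertices that fall inside bbox (lon/lat)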
if transform_accum is None:
transform_accum = np.eye(4)
points_list = []
tile_transform = parse_transform(tile_json)
transform_total = transform_accum @ tile_transform
print(f"\n处理 Tile当前累计 transform 矩阵:\n{transform_total}")
print(f"Tile 的 bbox (输入): {bbox}")
content = tile_json.get("content")
if content:
uri = content.get("uri")
if uri and uri.endswith(".b3dm"):
if base_url is not None:
b3dm_url = urljoin(base_url + "/", uri)
b3dm_path = download_file(b3dm_url)
else:
b3dm_path = os.path.join(base_dir, uri)
if os.path.exists(b3dm_path):
with open(b3dm_path, "rb") as f:
b3dm = B3dm.from_array(f.read())
positions = extract_positions_from_b3dm(b3dm.glb_bytes, transform_total)
print(f"加载 {b3dm_path},原始顶点数量: {len(positions)}")
lon, lat, elev = transformer.transform(
positions[:,0], positions[:,1], positions[:,2]
)
print(f"转换后经度范围: {lon.min():.6f} ~ {lon.max():.6f}")
print(f"转换后纬度范围: {lat.min():.6f} ~ {lat.max():.6f}")
geo_pts = np.vstack([lon, lat, elev]).T
mask = (
(geo_pts[:,0] >= bbox[0]) & (geo_pts[:,0] <= bbox[2]) &
(geo_pts[:,1] >= bbox[1]) & (geo_pts[:,1] <= bbox[3])
)
in_bbox_count = np.sum(mask)
print(f"bbox内点数量: {in_bbox_count}")
points_list.append(geo_pts[mask])
elif uri and uri.endswith(".json"):
if base_url is not None:
child_url = urljoin(base_url + "/", uri)
child_json, child_base_dir, child_base_url = load_tileset_json(child_url)
else:
child_path = os.path.join(base_dir, uri)
child_json, child_base_dir, child_base_url = load_tileset_json(child_path)
pts = recursive_load(child_json, child_base_dir, child_base_url, bbox, transformer, transform_total)
if pts.size > 0:
points_list.append(pts)
for child in tile_json.get("children", []):
pts = recursive_load(child, base_dir, base_url, bbox, transformer, transform_total)
if pts.size > 0:
points_list.append(pts)
if points_list:
return np.vstack(points_list)
else:
return np.empty((0,3))
def load_tileset_all_points(path_or_url, bbox):
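# Entry point: load a tileset and return every vertex inside bbox as lon/lat/elevation points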
transformer = Transformer.from_crs("EPSG:4979", "EPSG:4326", always_xy=True)
tileset_json, base_dir, base_url = load_tileset_json(path_or_url)
points = recursive_load(tileset_json, base_dir, base_url, bbox, transformer)
print(f"\n总计提取 {len(points)} 个点在区域内")
return points
if __name__ == "__main__":
bbox = [116.391, 39.905, 116.405, 39.915]
model1_url = "http://8.137.54.85:9000/300bdf2b-a150-406e-be63-d28bd29b409f/dszh/1748398014403562192_OUT/B3DM/tileset.json"
model2_url = "http://8.137.54.85:9000/300bdf2b-a150-406e-be63-d28bd29b409f/dszh/1748325943733189898_OUT/B3DM/tileset.json"
pts1 = load_tileset_all_points(model1_url, bbox)
pts2 = load_tileset_all_points(model2_url, bbox)