Spaces:
Paused
Paused
import json
import os

import numpy as np
from scipy.spatial.transform import Rotation as R

# Import centralized configuration for path management.
# Prefer the packaged module; fall back to a top-level `config` module
# (e.g. when this file is executed directly from inside the package dir).
try:
    from cube3d.config import get_mapping_paths
    print("Successfully imported config from cube3d.config")
except ImportError as e:
    print(f"Import from cube3d.config failed: {e}")
    try:
        from config import get_mapping_paths
        print("Successfully imported config from config")
    except ImportError as e:
        print(f"Failed to import config: {e}")
        raise ImportError("Failed to import get_mapping_paths from cube3d.config or config")

# Rotation <-> 24-way one-hot helpers, with the same packaged/top-level fallback.
try:
    from cube3d.training.check_rotation_onehot import (
        rot_to_onehot24,
        onehot24_to_rot,
        signed_perm_mats_det_plus_1
    )
    print("Successfully imported from cube3d.training.check_rotation_onehot")
except ImportError as e:
    print(f"Import from cube3d.training.check_rotation_onehot failed: {e}")
    try:
        from check_rotation_onehot import (
            rot_to_onehot24,
            onehot24_to_rot,
            signed_perm_mats_det_plus_1
        )
        print("Successfully imported from check_rotation_onehot")
    except ImportError as e:
        print(f"Import from check_rotation_onehot failed: {e}")
        raise ImportError(
            "Failed to import rot_to_onehot24, onehot24_to_rot, "
            "and signed_perm_mats_det_plus_1 from both "
            "cube3d.training.check_rotation_onehot and check_rotation_onehot"
        )
def rotation_to_onehot(rotation_matrix):
    """Encode a 3x3 rotation matrix as a 64-dim one-hot vector.

    The hot index packs three per-axis angles (each restricted to
    0/90/180/270 degrees) as ``x_angle * 16 + y_angle * 4 + z_angle``,
    where each ``*_angle`` is the position (0-3) in ``possible_angles``.

    NOTE(review): the y-angle uses atan2(R[2,0], R[2,2]) rather than the
    conventional atan2(-R[2,0], sqrt(R[2,1]^2 + R[2,2]^2)) — presumably a
    project-specific convention; confirm against the decoder used in
    training.
    """
    possible_angles = [0, 90, 180, 270]
    # Initialize a 64-dimensional one-hot encoding.
    one_hot = np.zeros(64)
    try:
        # Recover per-axis angles from matrix entries (np.round absorbs
        # float noise). list.index(...) raises ValueError when the
        # recovered angle is not one of the four quarter turns, which
        # triggers the snapping fallback below.
        x_angle = possible_angles.index(round(np.arctan2(np.round(rotation_matrix[2, 1], 1), np.round(rotation_matrix[2, 2],1)) * 180 / np.pi) % 360)
        y_angle = possible_angles.index(round(np.arctan2(np.round(rotation_matrix[2, 0], 1), np.round(rotation_matrix[2, 2],1)) * 180 / np.pi) % 360)
        z_angle = possible_angles.index(round(np.arctan2(np.round(rotation_matrix[1, 0], 1), np.round(rotation_matrix[0, 0],1)) * 180 / np.pi) % 360)
        # Index determined by the combination of x, y, z rotation angles.
        index = x_angle * 16 + y_angle * 4 + z_angle
    except Exception as e:
        # Fallback: snap each row to its dominant axis (keeping the sign)
        # and retry the same angle recovery on the snapped matrix.
        # NOTE(review): the .index(...) calls below can still raise if the
        # snapped matrix is not a quarter-turn rotation — confirm inputs.
        modified_matrix = rotation_matrix.copy()
        max_vals = np.max(np.abs(modified_matrix), axis=1)  # max |value| per row
        sign_matrix = np.sign(modified_matrix)
        modified_matrix = sign_matrix * (np.abs(modified_matrix) == max_vals[:, None])
        x_angle = possible_angles.index(round(np.arctan2(np.round(modified_matrix[2, 1], 1), np.round(modified_matrix[2, 2],1)) * 180 / np.pi) % 360)
        y_angle = possible_angles.index(round(np.arctan2(np.round(modified_matrix[2, 0], 1), np.round(modified_matrix[2, 2],1)) * 180 / np.pi) % 360)
        z_angle = possible_angles.index(round(np.arctan2(np.round(modified_matrix[1, 0], 1), np.round(modified_matrix[0, 0],1)) * 180 / np.pi) % 360)
        index = x_angle * 16 + y_angle * 4 + z_angle
        # NOTE(review): index is at most 63 (3*16 + 3*4 + 3), so this
        # branch appears unreachable; were it ever taken, the
        # one_hot[index] assignment below would raise IndexError.
        if index >= 64:
            print(f"Error occurred: {e}")
            with open("rotation_matrix_300_afterroundafter1_error_log.txt", "a") as file:  # append mode
                file.write(f"Error with rotation matrix:\n{np.round(modified_matrix, 1)}\n")
                file.write("-" * 50 + "\n")  # separator between logged errors
    one_hot[index] = 1
    return one_hot
| import numpy as np | |
def onehot_to_rotation(one_hot):
    """Decode a 64-dim one-hot vector back into a 3x3 rotation matrix.

    The hot index packs three axis angles as ``x*16 + y*4 + z``, each
    drawn from {0, 90, 180, 270} degrees; the result is Rz @ Ry @ Rx.
    """
    quarter_turns = [0, 90, 180, 270]
    # The position of the single 1 encodes all three angles.
    hot = int(np.argmax(one_hot))
    theta_x = np.radians(quarter_turns[(hot // 16) % 4])  # rotation about X
    theta_y = np.radians(quarter_turns[(hot // 4) % 4])   # rotation about Y
    theta_z = np.radians(quarter_turns[hot % 4])          # rotation about Z

    cx, sx = np.cos(theta_x), np.sin(theta_x)
    cy, sy = np.cos(theta_y), np.sin(theta_y)
    cz, sz = np.cos(theta_z), np.sin(theta_z)

    # Elementary rotations about each axis.
    rot_x = np.array([[1.0, 0.0, 0.0],
                      [0.0, cx, -sx],
                      [0.0, sx, cx]])
    rot_y = np.array([[cy, 0.0, sy],
                      [0.0, 1.0, 0.0],
                      [-sy, 0.0, cy]])
    rot_z = np.array([[cz, -sz, 0.0],
                      [sz, cz, 0.0],
                      [0.0, 0.0, 1.0]])

    # Compose in the same order as the encoder: Rz @ Ry @ Rx.
    return rot_z @ rot_y @ rot_x
def load_mappings(label_mapping_file, label_inverse_mapping_file):
    """Load the forward and inverse part-label mappings from JSON files.

    Args:
        label_mapping_file: path to the name -> id JSON mapping.
        label_inverse_mapping_file: path to the id -> name JSON mapping.

    Returns:
        (label_mapping, label_inverse_mapping) as loaded dicts.

    Raises:
        Re-raises any load/parse failure after printing a diagnostic.
    """
    print(f"🔍 [DEBUG] load_mappings() called")
    print(f" Forward file: {label_mapping_file}")
    print(f" Inverse file: {label_inverse_mapping_file}")

    try:
        with open(label_mapping_file, 'r') as handle:
            forward = json.load(handle)
        print(f" ✅ Forward mapping loaded: {len(forward)} entries")
    except Exception as err:
        print(f" ❌ Failed to load forward mapping: {err}")
        raise

    try:
        with open(label_inverse_mapping_file, 'r') as handle:
            inverse = json.load(handle)
        print(f" ✅ Inverse mapping loaded: {len(inverse)} entries")
    except Exception as err:
        print(f" ❌ Failed to load inverse mapping: {err}")
        raise

    return forward, inverse
def read_ldr_file(file_path):
    """Read an LDR file and return all of its lines (newlines kept)."""
    with open(file_path, 'r') as handle:
        return handle.readlines()
# Process an LDR file's lines and extract per-part training features.
def process_ldr_data(lines):
    """Convert LDR "type 1" part lines into a one-hot feature matrix.

    Each output row concatenates, in order: the 24-dim rotation one-hot,
    one-hot x (213 dims), one-hot y (217 dims), one-hot z (529 dims),
    a one-hot part label, and a trailing 2-dim flag [1, 0].

    Returns:
        (np.ndarray, dict): the feature matrix and the inverse label
        mapping (id -> part filename).
    """
    data = []
    filenames = []
    all_coords = []
    all_colors = []
    all_labels = []
    label_mapping = {}
    label_inverse_mapping = {}
    # Use config-based paths (works in both local and HF Space environments)
    forward_path, inverse_path = get_mapping_paths("subset_self")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    # NOTE(review): label_counter restarts at 0 on top of the mapping just
    # loaded from disk; a part absent from that mapping would be assigned
    # an id that likely collides with an existing entry — confirm the
    # mapping files are complete for this subset.
    label_counter = 0
    # Hard-coded inclusive coordinate maxima for this dataset.
    max_x = 212
    max_y = 216
    max_z = 528
    for line in lines:
        if line.startswith('1'):  # only process part ("type 1") lines
            parts = line.split()  # split the columns on whitespace
            # (disabled) per-line field-count validation:
            # if len(parts) != 15:
            #     print(f"Skipping line due to unexpected length: {line.strip()}")
            #     continue
            color = int(parts[1])  # LDraw colour code
            x, y, z = round(float(parts[2])), round(float(parts[3])), round(float(parts[4]))
            rx = list(map(float, parts[5:8]))    # rotation matrix row 1
            ry = list(map(float, parts[8:11]))   # rotation matrix row 2
            rz = list(map(float, parts[11:14]))  # rotation matrix row 3
            filename = parts[14]  # part filename
            all_coords.append([x, y, z])
            all_colors.append(color)
            # Normalise the extension's case.
            if ".DAT" in filename:
                filename = filename.replace(".DAT", ".dat")
            if filename not in label_mapping:
                label_mapping[filename] = label_counter
                label_inverse_mapping[label_counter] = filename
                label_counter += 1
            all_labels.append(label_mapping[filename])
            rotation_matrix = np.array([rx + ry + rz]).reshape(3, 3)
            rotation_onehot, _ = rot_to_onehot24(rotation_matrix)
            # Rows start with the 24-dim rotation one-hot only; the other
            # segments are appended after all lines are read.
            data.append(rotation_onehot.tolist())
            filenames.append(filename)
    all_coords = np.array(all_coords)
    # One-hot encode each coordinate axis (index == integer coordinate).
    one_hot_x = np.eye(max_x+1)[all_coords[:, 0].astype(int)]
    one_hot_y = np.eye(max_y+1)[all_coords[:, 1].astype(int)]
    one_hot_z = np.eye(max_z+1)[all_coords[:, 2].astype(int)]
    for i, entry in enumerate(data):
        entry.extend(np.concatenate([one_hot_x[i], one_hot_y[i], one_hot_z[i]]))
    # NOTE(review): dead branch — label_mapping is always a dict here.
    if label_mapping is None:
        label_mapping = {filename: idx for idx, filename in enumerate(sorted(set(all_labels)))}
    else:
        label_mapping = label_mapping
    label_max = len(label_mapping)  # number of distinct part labels
    one_hot_labels = np.eye(label_max)[all_labels]
    for i, entry in enumerate(data):
        entry.extend(one_hot_labels[i])  # append the one-hot part label
    for i, entry in enumerate(data):
        entry.extend([1,0])  # 2-dim flag; presumably "continue/stop" — confirm
    return np.array(data), label_inverse_mapping
def process_ldr_flatten(lines):
    """Convert LDR "type 1" part lines into a compact (integer-id) matrix.

    Each output row is [rotation_id, label_id, x, y, z, 1, 0]: the 24-way
    rotation class id, the integer part-label id, the raw integer
    coordinates, and a trailing 2-dim flag.

    Returns:
        (np.ndarray, dict): the feature matrix and the inverse label
        mapping (id -> part filename).
    """
    data = []
    filenames = []
    all_coords = []
    all_colors = []
    all_labels = []
    label_mapping = {}
    label_inverse_mapping = {}
    # Use config-based paths (works in both local and HF Space environments)
    forward_path, inverse_path = get_mapping_paths("subset_1k")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    # NOTE(review): label_counter restarts at 0 on top of the loaded
    # mapping — a new part would collide with an existing id; confirm the
    # mapping files cover every part in this subset.
    label_counter = 0
    # Hard-coded inclusive coordinate maxima for this dataset.
    max_x = 250
    max_y = 214
    max_z = 524
    for line in lines:
        if line.startswith('1'):  # only process part ("type 1") lines
            parts = line.split()
            # (disabled) per-line field-count validation:
            # if len(parts) < 15:
            #     print(f"Skipping line due to unexpected length: {line.strip()}")
            #     continue
            color = int(parts[1])  # LDraw colour code
            x, y, z = round(float(parts[2])), round(float(parts[3])), round(float(parts[4]))
            rx = list(map(float, parts[5:8]))    # rotation matrix row 1
            ry = list(map(float, parts[8:11]))   # rotation matrix row 2
            rz = list(map(float, parts[11:14]))  # rotation matrix row 3
            filename = parts[14].lower()  # part filename, lower-cased
            all_coords.append([x, y, z])
            all_colors.append(color)
            # NOTE(review): dead check — filename was already lower-cased.
            if ".DAT" in filename:
                filename = filename.replace(".DAT", ".dat")
            if filename not in label_mapping:
                label_mapping[filename] = label_counter
                label_inverse_mapping[label_counter] = filename
                label_counter += 1
            all_labels.append(label_mapping[filename])
            rotation_matrix = np.array([rx + ry + rz]).reshape(3, 3)
            rotation_onehot, _ = rot_to_onehot24(rotation_matrix)
            rotation_id = rotation_onehot.argmax()  # 24-way rotation class id
            data.append([rotation_id])
            filenames.append(filename)
    all_coords = np.array(all_coords)
    # NOTE(review): the three one-hot arrays below are computed but never
    # used in this function (the flattened variant stores raw integers).
    one_hot_x = np.eye(max_x+1)[all_coords[:, 0].astype(int)]
    one_hot_y = np.eye(max_y+1)[all_coords[:, 1].astype(int)]
    one_hot_z = np.eye(max_z+1)[all_coords[:, 2].astype(int)]
    # NOTE(review): dead branch — label_mapping is always a dict here.
    if label_mapping is None:
        label_mapping = {filename: idx for idx, filename in enumerate(sorted(set(all_labels)))}
    else:
        label_mapping = label_mapping
    label_max = len(label_mapping)  # number of distinct part labels
    one_hot_labels = np.eye(label_max)[all_labels]  # NOTE(review): unused below
    for i, entry in enumerate(data):
        entry.extend([all_labels[i]])  # integer part-label id
        # Raw integer coordinates (not one-hot in the flattened variant).
        entry.extend(np.concatenate([
            np.array([all_coords[i, 0].astype(int)]),
            np.array([all_coords[i, 1].astype(int)]),
            np.array([all_coords[i, 2].astype(int)])
        ]))
    for i, entry in enumerate(data):
        entry.extend([1,0])  # 2-dim flag; presumably "continue/stop" — confirm
    return np.array(data), label_inverse_mapping
def process_ldr_flatten_bottom(lines):
    """Convert LDR "type 1" part lines of the "bottom" subset into the
    compact (integer-id) matrix used by the flattened pipeline.

    Same row layout as ``process_ldr_flatten``:
    [rotation_id, label_id, x, y, z, 1, 0].

    Returns:
        (np.ndarray, dict): the feature matrix and the inverse label
        mapping (id -> part filename).

    NOTE(review): the mapping paths below are absolute cluster paths and
    will not resolve elsewhere — consider routing them through
    get_mapping_paths like the sibling functions.
    """
    data = []
    filenames = []
    all_coords = []
    all_colors = []
    all_labels = []
    label_mapping = {}
    label_inverse_mapping = {}
    label_mapping, label_inverse_mapping = load_mappings('/public/home/wangshuo/gap/assembly/data/car_1k/subset_bottom_300/label_mapping.json', '/public/home/wangshuo/gap/assembly/data/car_1k/subset_bottom_300/label_inverse_mapping.json')
    # NOTE(review): label_counter restarts at 0 on top of the loaded
    # mapping — a new part would collide with an existing id.
    label_counter = 0
    # Hard-coded inclusive coordinate maxima for the "bottom" subset.
    max_x = 212
    max_y = 72
    max_z = 410
    for line in lines:
        if line.startswith('1'):  # only process part ("type 1") lines
            parts = line.split()
            # (disabled) per-line field-count validation:
            # if len(parts) != 15:
            #     print(f"Skipping line due to unexpected length: {line.strip()}")
            #     continue
            color = int(parts[1])  # LDraw colour code
            x, y, z = round(float(parts[2])), round(float(parts[3])), round(float(parts[4]))
            rx = list(map(float, parts[5:8]))    # rotation matrix row 1
            ry = list(map(float, parts[8:11]))   # rotation matrix row 2
            rz = list(map(float, parts[11:14]))  # rotation matrix row 3
            filename = parts[14].lower()  # part filename, lower-cased
            all_coords.append([x, y, z])
            all_colors.append(color)
            # NOTE(review): dead check — filename was already lower-cased.
            if ".DAT" in filename:
                filename = filename.replace(".DAT", ".dat")
            if filename not in label_mapping:
                label_mapping[filename] = label_counter
                label_inverse_mapping[label_counter] = filename
                label_counter += 1
            all_labels.append(label_mapping[filename])
            rotation_matrix = np.array([rx + ry + rz]).reshape(3, 3)
            rotation_onehot, _ = rot_to_onehot24(rotation_matrix)
            rotation_id = rotation_onehot.argmax()  # 24-way rotation class id
            data.append([rotation_id])
            filenames.append(filename)
    all_coords = np.array(all_coords)
    # NOTE(review): computed but never used in this function.
    one_hot_x = np.eye(max_x+1)[all_coords[:, 0].astype(int)]
    one_hot_y = np.eye(max_y+1)[all_coords[:, 1].astype(int)]
    one_hot_z = np.eye(max_z+1)[all_coords[:, 2].astype(int)]
    # NOTE(review): dead branch — label_mapping is always a dict here.
    if label_mapping is None:
        label_mapping = {filename: idx for idx, filename in enumerate(sorted(set(all_labels)))}
    else:
        label_mapping = label_mapping
    label_max = len(label_mapping)  # number of distinct part labels
    one_hot_labels = np.eye(label_max)[all_labels]  # NOTE(review): unused below
    for i, entry in enumerate(data):
        entry.extend([all_labels[i]])  # integer part-label id
        # Raw integer coordinates (not one-hot in the flattened variant).
        entry.extend(np.concatenate([
            np.array([all_coords[i, 0].astype(int)]),
            np.array([all_coords[i, 1].astype(int)]),
            np.array([all_coords[i, 2].astype(int)])
        ]))
    for i, entry in enumerate(data):
        entry.extend([1,0])  # 2-dim flag; presumably "continue/stop" — confirm
    return np.array(data), label_inverse_mapping
def save_data_as_npy(data, output_file):
    """Persist *data* to *output_file* in NumPy ``.npy`` format."""
    np.save(output_file, data)
def logits2ldr(normalized_data, label_inverse_mapping=None, max_vals=None, min_vals=None, label_max=None, label_min=None, max_color=None, min_color=None, output_file='restored_data.ldr'):
    """Decode one-hot model output rows back into LDR "type 1" lines.

    Expected row layout of ``normalized_data`` (argmax-decoded per
    segment): [0:24] rotation, then x (213), y (217), z (529),
    part label (604), and a trailing 2-dim flag.

    Writes the decoded lines to ``output_file`` and returns them.

    NOTE(review): apart from ``normalized_data`` and ``output_file``,
    every parameter is ignored — ``label_inverse_mapping`` in particular
    is unconditionally overwritten by ``load_mappings`` below, so the
    ``is None`` fallback dict is dead code.
    """
    # Segment sizes / end offsets of the concatenated one-hot row.
    dat_num = 604  # part-label segment size
    x_num = 213
    y_num = 217
    z_num = 529
    rot_num = 24
    x = x_num
    xy = x_num + y_num + rot_num           # end offset of the y segment
    xyz = x_num + y_num + z_num + rot_num  # end offset of the z segment
    # Use config-based paths (works in both local and HF Space environments)
    forward_path, inverse_path = get_mapping_paths("subset_self")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    if label_inverse_mapping is None:  # NOTE(review): unreachable — see docstring
        label_inverse_mapping = {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'}
    # argmax-decode each one-hot segment of the model output.
    normalized_labels = normalized_data[:, xyz:xyz+dat_num].argmax(1)
    normalized_coords_x = normalized_data[:, rot_num:rot_num+x].argmax(1)
    normalized_coords_y = normalized_data[:, rot_num+x:xy].argmax(1)
    normalized_coords_z = normalized_data[:, xy:xyz].argmax(1)
    restored_coords = np.stack((normalized_coords_x, normalized_coords_y, normalized_coords_z), axis=-1)
    flag = normalized_data[:, xyz+dat_num:xyz+dat_num+2].argmax(1)  # trailing 2-dim flag
    ldr_lines = []
    for i, entry in enumerate(normalized_data):
        color = 0  # colour is not modelled; always emit LDraw colour 0
        x, y, z = restored_coords[i]
        # JSON mapping keys are strings, hence the str(...) lookup.
        label = label_inverse_mapping[str(np.clip(np.round(normalized_labels[i]), 0, dat_num).astype(int))]
        rotation_matrix = onehot24_to_rot(entry[:rot_num])
        f = 1 - flag[i]  # line type from the flag; presumably 1 = part line — confirm
        ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \
            f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \
            f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \
            f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \
            f"{label}\n"
        ldr_lines.append(ldr_line)
    with open(output_file, 'w') as f:
        f.writelines(ldr_lines)
    print(f"Restored LDR data saved to {output_file}")
    return ldr_lines
def logits2ldrot(normalized_data, input_data, label_inverse_mapping='', max_vals='', min_vals='', label_max='', label_min='', max_color='', min_color='', output_file='restored_data_rot_wop.ldr'):
    """Decode predicted rotations combined with ground-truth pose/label.

    Rotation one-hots and the trailing 2-dim flag come from
    ``normalized_data`` (model output); coordinates and part labels are
    argmax-decoded from ``input_data`` (same segment layout as
    ``logits2ldr``). The last row of ``normalized_data`` is skipped.
    Writes the lines to ``output_file`` and returns them.

    NOTE(review): ``label_inverse_mapping`` and the other keyword
    parameters are ignored — the mapping is unconditionally reloaded via
    ``load_mappings`` below, so the ``is None`` fallback is dead code
    (doubly so given the '' defaults).
    """
    # Segment sizes / end offsets of the concatenated one-hot row.
    dat_num = 604  # part-label segment size
    x_num = 213
    y_num = 217
    z_num = 529
    rot_num = 24
    x = x_num
    xy = x_num + y_num + rot_num           # end offset of the y segment
    xyz = x_num + y_num + z_num + rot_num  # end offset of the z segment
    # Use config-based paths (works in both local and HF Space environments)
    forward_path, inverse_path = get_mapping_paths("subset_self")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    if label_inverse_mapping is None:  # NOTE(review): unreachable — see docstring
        label_inverse_mapping = {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'}
    # Ground-truth label / coordinates come from the input tensor.
    input_labels = input_data[:, xyz:xyz+dat_num].argmax(1)
    input_coords_x = input_data[:, rot_num:rot_num+x].argmax(1)
    input_coords_y = input_data[:, rot_num+x:xy].argmax(1)
    input_coords_z = input_data[:, xy:xyz].argmax(1)
    restored_coords = np.stack((input_coords_x, input_coords_y, input_coords_z), axis=-1)
    flag = normalized_data[:, -2:].argmax(1)  # predicted trailing 2-dim flag
    ldr_lines = []
    for i, entry in enumerate(normalized_data[:-1]):  # last row intentionally skipped
        color = 0  # colour is not modelled; always emit LDraw colour 0
        x, y, z = restored_coords[i]
        # JSON mapping keys are strings, hence the str(...) lookup.
        label = label_inverse_mapping[str(np.clip(np.round(input_labels[i]), 0, dat_num).astype(int))]
        rotation_matrix = onehot24_to_rot(entry[:rot_num])
        f = 1 - flag[i]  # line type from the flag; presumably 1 = part line — confirm
        ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \
            f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \
            f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \
            f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \
            f"{label}\n"
        ldr_lines.append(ldr_line)
    with open(output_file, 'w') as f:
        f.writelines(ldr_lines)
    print(f"Restored LDR data saved to {output_file}")
    return ldr_lines
def logits2ldrp(normalized_data, input_data, label_inverse_mapping='', max_vals='', min_vals='', label_max='', label_min='', max_color='', min_color='', output_file='restored_data_rot_wop.ldr'):
    """Decode predicted positions combined with ground-truth rotation/label.

    Coordinates are argmax-decoded from ``normalized_data`` (model
    output); rotations and part labels come from ``input_data`` (same
    segment layout as ``logits2ldr``). The last row of ``input_data`` is
    skipped and the line type is hard-coded to 1. Writes the lines to
    ``output_file`` and returns them.

    NOTE(review): ``label_inverse_mapping`` and the other keyword
    parameters are ignored — the mapping is unconditionally reloaded via
    ``load_mappings`` below, so the ``is None`` fallback is dead code.
    """
    # Segment sizes / end offsets of the concatenated one-hot row.
    dat_num = 604  # part-label segment size
    x_num = 213
    y_num = 217
    z_num = 529
    rot_num = 24
    x = x_num
    xy = x_num + y_num + rot_num           # end offset of the y segment
    xyz = x_num + y_num + z_num + rot_num  # end offset of the z segment
    # Use config-based paths (works in both local and HF Space environments)
    forward_path, inverse_path = get_mapping_paths("subset_self")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    if label_inverse_mapping is None:  # NOTE(review): unreachable — see docstring
        label_inverse_mapping = {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'}
    # Ground-truth labels from the input; predicted coordinates from the output.
    input_labels = input_data[:, xyz:xyz+dat_num].argmax(1)
    input_coords_x = normalized_data[:, rot_num:rot_num+x].argmax(1)
    input_coords_y = normalized_data[:, rot_num+x:xy].argmax(1)
    input_coords_z = normalized_data[:, xy:xyz].argmax(1)
    restored_coords = np.stack((input_coords_x, input_coords_y, input_coords_z), axis=-1)
    flag = normalized_data[:, -2:].argmax(1)  # NOTE(review): computed but unused below
    ldr_lines = []
    for i, entry in enumerate(input_data[:-1]):  # last row intentionally skipped
        color = 0  # colour is not modelled; always emit LDraw colour 0
        x, y, z = restored_coords[i]
        # JSON mapping keys are strings, hence the str(...) lookup.
        label = label_inverse_mapping[str(np.clip(np.round(input_labels[i]), 0, dat_num).astype(int))]
        rotation_matrix = onehot24_to_rot(entry[:rot_num])
        f = 1  # line type hard-coded to 1 (the predicted flag is ignored here)
        ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \
            f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \
            f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \
            f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \
            f"{label}\n"
        ldr_lines.append(ldr_line)
    with open(output_file, 'w') as f:
        f.writelines(ldr_lines)
    print(f"Restored LDR data saved to {output_file}")
    return ldr_lines
def logits2flatldrp(normalized_data, input_data, label_inverse_mapping='', max_vals='', min_vals='', label_max='', label_min='', max_color='', min_color='', output_file='restored_data_rot_wop.ldr'):
    """Decode the flattened (integer-id) representation into LDR lines.

    ``input_data`` rows are expected to end with
    [..., rotation_id, label_id, x, y, z, flag0, flag1]; the rotation id
    indexes the 24 signed permutation matrices with determinant +1. The
    last row is skipped and the line type is hard-coded to 1. Writes the
    lines to ``output_file`` and returns them.

    NOTE(review): ``normalized_data`` and every keyword parameter are
    ignored; the mapping is unconditionally reloaded below. Indexing
    ``R24[entry[-7]]`` assumes ``input_data`` holds integers — a float
    array would fail there; confirm the upstream dtype.
    """
    # Segment sizes kept for parity with the one-hot decoders.
    dat_num = 604
    x_num = 213
    y_num = 217
    z_num = 529
    rot_num = 24
    R24 = signed_perm_mats_det_plus_1()  # the 24 axis-aligned rotation matrices
    x = x_num
    xy = x_num + y_num + rot_num
    xyz = x_num + y_num + z_num + rot_num
    stride = 3  # NOTE(review): unused in this function
    # Use config-based paths (works in both local and HF Space environments)
    forward_path, inverse_path = get_mapping_paths("subset_self")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    if label_inverse_mapping is None:  # NOTE(review): unreachable — see docstring
        label_inverse_mapping = {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'}
    # Trailing columns of each row: label id, then x, y, z (rotation id is at -7).
    input_labels = input_data[:, -6]
    input_coords_x = input_data[:, -5]
    input_coords_y = input_data[:, -4]
    input_coords_z = input_data[:, -3]
    restored_coords = np.stack((input_coords_x, input_coords_y, input_coords_z), axis=-1)
    ldr_lines = []
    for i, entry in enumerate(input_data[:-1]):  # last row intentionally skipped
        color = 0  # colour is not modelled; always emit LDraw colour 0
        x, y, z = restored_coords[i]
        # JSON mapping keys are strings, hence the str(...) lookup.
        label = label_inverse_mapping[str(np.clip(np.round(input_labels[i]), 0, dat_num).astype(int))]
        rotation_matrix = R24[entry[-7]]  # look up the rotation matrix by its id
        f = 1  # line type hard-coded to 1 (flags at [-2:] are ignored here)
        ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \
            f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \
            f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \
            f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \
            f"{label}\n"
        ldr_lines.append(ldr_line)
    with open(output_file, 'w') as f:
        f.writelines(ldr_lines)
    print(f"Restored LDR data saved to {output_file}")
    return ldr_lines
def logits2flatldrpr(normalized_data, input_data, stride, given, label_inverse_mapping='', max_vals='', min_vals='', label_max='', label_min='', max_color='', min_color='', output_file='restored_data_rot_wop.ldr'):
    """Decode "flat" model logits into LDraw (.ldr) type-1 part lines.

    Part-label ids come from ``input_data`` (column -6); rotation and x/y/z
    class ids are recovered by arg-maxing strided row/column slices of the
    logit matrix ``normalized_data``.  One LDR line per brick is written to
    ``output_file`` and the same lines are returned.

    Parameters
    ----------
    normalized_data : np.ndarray
        2-D logit matrix whose rows interleave per-brick attribute tokens
        with period ``stride``.
    input_data : np.ndarray
        Per-brick conditioning rows; column -6 holds the part-label id.
        The final row is a terminator and is not emitted.
    stride : int
        Number of tokens per brick in ``normalized_data``.
    given : int
        Number of conditioning bricks (currently unused; kept so existing
        call sites keep working).
    label_inverse_mapping, max_vals, min_vals, label_max, label_min,
    max_color, min_color :
        Unused placeholders kept for interface compatibility; the label
        mapping is loaded from config-resolved paths instead.
    output_file : str
        Destination path for the generated .ldr file.

    Returns
    -------
    list[str]
        The LDR lines that were written.
    """
    # Vocabulary sizes of the quantized attributes (label / x / y / z / rot).
    dat_num = 604
    x_num = 213
    y_num = 217
    z_num = 529
    rot_num = 24
    # Rotation codebook: the 24 signed permutation matrices with det +1.
    R24 = signed_perm_mats_det_plus_1()
    # Cumulative column offsets of each attribute inside a logit row.
    x = x_num
    xy = x_num + y_num + rot_num
    xyz = x_num + y_num + z_num + rot_num
    attr_shift = stride - 3  # +1 for bert
    bert_shift = 1
    # Use config-based paths (works in both local and HF Space environments).
    forward_path, inverse_path = get_mapping_paths("subset_self")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    if label_inverse_mapping is None:
        # Hard-coded fallback when the mapping JSONs are unavailable.
        # BUGFIX: keys are converted to strings, because the lookup below is
        # label_inverse_mapping[str(...)] and JSON-loaded mappings always
        # have str keys; the previous int-keyed literal raised KeyError here.
        label_inverse_mapping = {str(k): v for k, v in {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'}.items()}
    # Label ids are read directly from the conditioning data.
    input_labels = input_data[:, -6]
    # Class ids for rotation and coordinates: argmax over each attribute's
    # logit columns at the strided rows carrying that attribute's token.
    input_rot = normalized_data[1+bert_shift:-3:stride, :rot_num+1].argmax(1)
    input_coords_x = normalized_data[1+attr_shift+bert_shift:-1:stride, rot_num+1:x+rot_num+1+1].argmax(1)
    input_coords_y = normalized_data[0+attr_shift+bert_shift:-2:stride, x+rot_num+2:xy+3].argmax(1)
    input_coords_z = normalized_data[2+attr_shift+bert_shift::stride, xy+3:xyz+4].argmax(1)
    restored_coords = np.stack((input_coords_x, input_coords_y, input_coords_z), axis=-1)
    # Every brick is emitted with LDraw color 0.
    input_colors = np.zeros_like(input_data[:, 0])
    ldr_lines = []
    # Skip the final row of input_data (terminator token).
    for i in range(len(input_data) - 1):
        color = int(input_colors[i])
        x, y, z = restored_coords[i]
        # Clip the (possibly fractional) label id into range; mapping keys
        # are string ids (see fallback above).
        label = label_inverse_mapping[str(np.clip(np.round(input_labels[i]), 0, dat_num-1).astype(int))]
        rotation_matrix = R24[np.clip(input_rot[i], 0, rot_num-1)]
        # x past the valid grid (x > 212 == x_num - 1) marks a padding slot:
        # emit it as a comment line (type 0) instead of a part line (type 1).
        f = 0 if x > 212 else 1
        ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \
                   f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \
                   f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \
                   f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \
                   f"{label}\n"
        ldr_lines.append(ldr_line)
    with open(output_file, 'w') as out:
        out.writelines(ldr_lines)
    print(f"Restored LDR data saved to {output_file}")
    return ldr_lines
| # def logits2botldrpr(normalized_data, input_data, stride, given, label_inverse_mapping='', max_vals='', min_vals='', label_max='', label_min='', max_color='', min_color='', output_file='restored_data_rot_wop.ldr'): | |
| # dat_num = 286 | |
| # x_num = 213 | |
| # y_num = 73 | |
| # z_num = 411 | |
| # rot_num = 24 | |
| # R24 = signed_perm_mats_det_plus_1() | |
| # x = x_num | |
| # xy = x_num + y_num + rot_num | |
| # xyz = x_num + y_num + z_num + rot_num | |
| # stride = stride | |
| # attr_shift = stride-3 #+1 for bert | |
| # bert_shift = 1 | |
| # label_mapping, label_inverse_mapping = load_mappings('../data/car_1k/subset_bottom_300/label_mapping.json', '../data/car_1k/subset_bottom_300/label_inverse_mapping.json') | |
| # if label_inverse_mapping is None: | |
| # import ipdb; ipdb.set_trace() | |
| # # #label_inverse_mapping = {0: '98281.dat', 1: '3005.dat', 2: '3004.dat', 3: '3795.dat', 4: '3020.dat', 5: '3710.dat', 6: '3666.dat', 7: '3021.dat', 8: '2431.dat', 9: '4488.dat', 10: '3829a.dat', 11: '3829b.dat', 12: '43723.dat', 13: '3068b.dat', 14: '43722.dat', 15: '3832.dat', 16: '2432.dat', 17: '2437.dat', 18: '6231.dat', 19: '3040b.dat', 20: '3024.dat', 21: '11211.dat', 22: '2540.dat', 23: '61678.dat', 24: '3665.dat', 25: '11477.dat', 26: '93594.dat', 27: '50951.dat', 28: '4073.dat', 29: '6019.dat', 30: '6091.dat', 31: '3821.dat', 32: '3822.dat', 33: '98138.dat', 34: '3794a.dat', 35: '4081b.dat', 36: '3022.dat', 37: '30039.dat', 38: '50946.dat', 39: '4095.dat'} #blue_classic_car | |
| # label_inverse_mapping = {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'} | |
| # #input_labels = normalized_data[0:-4:stride, :dat_num+1].argmax(1) # | |
| # input_labels = input_data[:, -6] | |
| # #input_labels[:given] = input_data[:given, -6] #normalized_data[0:-4:stride, :dat_num+1].argmax(1) | |
| # input_rot = normalized_data[1+bert_shift:-3:stride, :rot_num+1].argmax(1) # #normalized_data[1:-3:stride, :rot_num+1].argmax(1) | |
| # #input_rot = input_data[:, 0] | |
| # input_coords_x = normalized_data[1+attr_shift+bert_shift:-1:stride, rot_num+1:x+rot_num+1+1].argmax(1) | |
| # input_coords_y = normalized_data[0+attr_shift+bert_shift:-2:stride, x+rot_num+2:xy+3].argmax(1) | |
| # input_coords_z = normalized_data[2+attr_shift+bert_shift::stride, xy+3:xyz+4].argmax(1) | |
| # # input_coords_x[:given] = input_data[:given, -5] | |
| # # input_coords_y[:given] = input_data[:given, -4] | |
| # # input_coords_z[:given] = input_data[:given, -3] | |
| # restored_coords = np.stack((input_coords_x, input_coords_y, input_coords_z), axis=-1) | |
| # #flag = normalized_data[:, xyz+dat_num:xyz+dat_num+2].argmax(1) | |
| # #flag = normalized_data[:, -2:].argmax(1) | |
| # input_colors = np.zeros_like(input_data[:, 0]) | |
| # #input_colors[:given] = (input_colors[:given] + 4) | |
| # ldr_lines = [] | |
| # #for i, entry in enumerate(normalized_data[:-1]): | |
| # for i, entry in enumerate(input_data[:-1]): | |
| # color = int(input_colors[i]) | |
| # x, y, z = restored_coords[i] | |
| # label = label_inverse_mapping[str(np.clip(np.round(input_labels[i]), 0, dat_num-1).astype(int))] | |
| # # quaternion = entry[4:8] # | |
| # # quaternion = quaternion / np.linalg.norm(quaternion) # | |
| # # r = R.from_quat(quaternion) | |
| # rotation_matrix = R24[np.clip(input_rot[i], 0, rot_num-1)] #R24[entry[-7]]#r.as_matrix() | |
| # if x>212: | |
| # f = 0# - flag[i] | |
| # else: | |
| # f = 1 | |
| # ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \ | |
| # f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \ | |
| # f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \ | |
| # f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \ | |
| # f"{label}\n" | |
| # ldr_lines.append(ldr_line) | |
| # with open(output_file, 'w') as f: | |
| # f.writelines(ldr_lines) | |
| # print(f"Restored LDR data saved to {output_file}") | |
| # return ldr_lines | |
def logits2botldrpr(normalized_data, input_data, stride, given, label_inverse_mapping='', max_vals='', min_vals='', label_max='', label_min='', max_color='', min_color='', output_file='restored_data_rot_wop.ldr'):
    """Decode "bottom" model logits into LDraw (.ldr) type-1 part lines.

    Same decoding scheme as :func:`logits2flatldrpr` but with the larger
    "subset_1k" vocabulary and grid sizes.  Part-label ids come from
    ``input_data`` (column -6); rotation and x/y/z class ids are recovered
    by arg-maxing strided slices of ``normalized_data``.

    Parameters
    ----------
    normalized_data : np.ndarray
        2-D logit matrix whose rows interleave per-brick attribute tokens
        with period ``stride``.
    input_data : np.ndarray
        Per-brick conditioning rows; column -6 holds the part-label id.
        The final row is a terminator and is not emitted.
    stride : int
        Number of tokens per brick in ``normalized_data``.
    given : int
        Number of conditioning bricks (currently unused; kept so existing
        call sites keep working).
    label_inverse_mapping, max_vals, min_vals, label_max, label_min,
    max_color, min_color :
        Unused placeholders kept for interface compatibility.
    output_file : str
        Destination path for the generated .ldr file.

    Returns
    -------
    list[str]
        The LDR lines that were written.
    """
    # Vocabulary sizes for the "subset_1k" dataset (old values in comments).
    dat_num = 1217  # 286
    x_num = 251  # 213
    y_num = 215  # 73
    z_num = 525  # 411
    rot_num = 24
    # Rotation codebook: the 24 signed permutation matrices with det +1.
    R24 = signed_perm_mats_det_plus_1()
    # Cumulative column offsets of each attribute inside a logit row.
    x = x_num
    xy = x_num + y_num + rot_num
    xyz = x_num + y_num + z_num + rot_num
    attr_shift = stride - 3  # +1 for bert
    bert_shift = 1
    # Use config-based paths (works in both local and HF Space environments).
    forward_path, inverse_path = get_mapping_paths("subset_1k")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    if label_inverse_mapping is None:
        # Hard-coded fallback when the mapping JSONs are unavailable.
        # BUGFIX: keys are converted to strings, because the lookup below is
        # label_inverse_mapping[str(...)] and JSON-loaded mappings always
        # have str keys; the previous int-keyed literal raised KeyError here.
        label_inverse_mapping = {str(k): v for k, v in {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'}.items()}
    # Label ids are read directly from the conditioning data.
    input_labels = input_data[:, -6]
    # Class ids for rotation and coordinates: argmax over each attribute's
    # logit columns at the strided rows carrying that attribute's token.
    input_rot = normalized_data[1+bert_shift:-3:stride, :rot_num+1].argmax(1)
    input_coords_x = normalized_data[1+attr_shift+bert_shift:-1:stride, rot_num+1:x+rot_num+1+1].argmax(1)
    input_coords_y = normalized_data[0+attr_shift+bert_shift:-2:stride, x+rot_num+2:xy+3].argmax(1)
    input_coords_z = normalized_data[2+attr_shift+bert_shift::stride, xy+3:xyz+4].argmax(1)
    restored_coords = np.stack((input_coords_x, input_coords_y, input_coords_z), axis=-1)
    # Every brick is emitted with LDraw color 0.
    input_colors = np.zeros_like(input_data[:, 0])
    ldr_lines = []
    # Skip the final row of input_data (terminator token).
    for i in range(len(input_data) - 1):
        color = int(input_colors[i])
        x, y, z = restored_coords[i]
        # Clip the (possibly fractional) label id into range; mapping keys
        # are string ids (see fallback above).
        label = label_inverse_mapping[str(np.clip(np.round(input_labels[i]), 0, dat_num-1).astype(int))]
        rotation_matrix = R24[np.clip(input_rot[i], 0, rot_num-1)]
        # x past the valid grid (x > x_num - 1) marks a padding slot:
        # emit it as a comment line (type 0) instead of a part line (type 1).
        f = 0 if x > (x_num - 1) else 1
        ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \
                   f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \
                   f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \
                   f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \
                   f"{label}\n"
        ldr_lines.append(ldr_line)
    with open(output_file, 'w') as out:
        out.writelines(ldr_lines)
    print(f"Restored LDR data saved to {output_file}")
    return ldr_lines
def ids2flatldrpr(normalized_data, input_data, stride, given, label_inverse_mapping='', max_vals='', min_vals='', label_max='', label_min='', max_color='', min_color='', output_file='restored_data_rot_wop.ldr'):
    """Decode already-discretized attribute *ids* into LDraw (.ldr) lines.

    Unlike the ``logits2*`` variants there is no argmax: ``normalized_data``
    is assumed to already carry class ids, with each strided slice holding
    exactly one value per row (``.item()`` below would raise otherwise) —
    TODO confirm against the caller.

    Parameters
    ----------
    normalized_data : np.ndarray
        2-D id matrix whose rows interleave per-brick attribute tokens with
        period ``stride``.
    input_data : np.ndarray
        Per-brick rows; only its length and column 0 (for the color vector)
        are used.  The final row is a terminator and is not emitted.
    stride : int
        Number of tokens per brick in ``normalized_data``.
    given : int
        Number of conditioning bricks (currently unused; kept so existing
        call sites keep working).
    label_inverse_mapping, max_vals, min_vals, label_max, label_min,
    max_color, min_color :
        Unused placeholders kept for interface compatibility.
    output_file : str
        Destination path for the generated .ldr file.

    Returns
    -------
    list[str]
        The LDR lines that were written.
    """
    # Vocabulary sizes of the quantized attributes (label / x / y / z / rot).
    dat_num = 604
    x_num = 213
    y_num = 217
    z_num = 529
    rot_num = 24
    # Rotation codebook: the 24 signed permutation matrices with det +1.
    R24 = signed_perm_mats_det_plus_1()
    attr_shift = stride - 3
    # Use config-based paths (works in both local and HF Space environments).
    forward_path, inverse_path = get_mapping_paths("subset_self")
    label_mapping, label_inverse_mapping = load_mappings(forward_path, inverse_path)
    if label_inverse_mapping is None:
        # Hard-coded fallback when the mapping JSONs are unavailable.
        # BUGFIX: keys are converted to strings, because the lookup below is
        # label_inverse_mapping[str(...)] and JSON-loaded mappings always
        # have str keys; the previous int-keyed literal raised KeyError here.
        label_inverse_mapping = {str(k): v for k, v in {0: '24308b.dat', 1: '3031.dat', 2: '4079.dat', 3: '3021.dat', 4: '3024.dat', 5: '3020.dat', 6: '29120.dat', 7: '71076a.dat', 8: '3023.dat', 9: '29119.dat', 10: '2412b.dat', 11: '86876.dat', 12: '11211.dat', 13: '87087.dat', 14: '3004.dat', 15: '15068.dat', 16: '3829c01.dat', 17: '11477.dat', 18: '79393.dat', 19: '63864.dat', 20: '3710.dat', 21: 'm17f5892b_2023521_010804.dat', 22: '6141.dat', 23: '85984pc2.dat', 24: '3010.dat', 25: '30414.dat', 26: '2431pt0.dat'}.items()}
    # Strided slices of the id matrix for each attribute (no argmax here).
    input_labels = normalized_data[0:-4:stride, :dat_num+1]
    input_rot = normalized_data[1:-3:stride, :rot_num+1]
    input_coords_x = normalized_data[1+attr_shift:-1:stride, :x_num+1]
    input_coords_y = normalized_data[0+attr_shift:-2:stride, :y_num+1]
    input_coords_z = normalized_data[2+attr_shift::stride, :z_num+1]
    restored_coords = np.stack((input_coords_x, input_coords_y, input_coords_z), axis=-1)
    # Hoisted out of the loop below: the squeeze is loop-invariant.
    coords = np.squeeze(restored_coords, axis=1)
    # Every brick is emitted with LDraw color 0.
    input_colors = np.zeros_like(input_data[:, 0])
    ldr_lines = []
    # Skip the final row of input_data (terminator token).
    for i in range(len(input_data) - 1):
        color = int(input_colors[i])
        x, y, z = coords[i]
        # Clip the label id into range; mapping keys are string ids.
        label = label_inverse_mapping[str(np.clip(np.round(input_labels[i].item()), 0, dat_num-1).astype(int))]
        rotation_matrix = R24[int(np.clip(input_rot[i].item(), 0, rot_num-1))]
        # x past the valid grid (x > 212 == x_num - 1) marks a padding slot:
        # emit it as a comment line (type 0) instead of a part line (type 1).
        f = 0 if x > 212 else 1
        ldr_line = f"{f} {color} {x:.6f} {y:.6f} {z:.6f} " \
                   f"{rotation_matrix[0, 0]:.6f} {rotation_matrix[0, 1]:.6f} {rotation_matrix[0, 2]:.6f} " \
                   f"{rotation_matrix[1, 0]:.6f} {rotation_matrix[1, 1]:.6f} {rotation_matrix[1, 2]:.6f} " \
                   f"{rotation_matrix[2, 0]:.6f} {rotation_matrix[2, 1]:.6f} {rotation_matrix[2, 2]:.6f} " \
                   f"{label}\n"
        ldr_lines.append(ldr_line)
    with open(output_file, 'w') as out:
        out.writelines(ldr_lines)
    print(f"Restored LDR data saved to {output_file}")
    return ldr_lines
def main(input_file):
    """Round-trip example: parse an LDR file, encode it, and decode it back.

    Reads ``input_file``, converts its part lines into the model's numeric
    representation, then reconstructs LDR lines from that representation.
    """
    raw_lines = read_ldr_file(input_file)
    # Encode the LDR text; also yields the id -> part-name inverse mapping.
    encoded, inverse_mapping = process_ldr_data(raw_lines)
    # Convert the encoded representation back into LDR lines.
    logits2ldr(encoded, inverse_mapping)
# Example usage (disabled by default; machine-specific absolute path).
input_file = '/public/home/wangshuo/gap/assembly/data/blue classic car/modified_blue classic car.ldr' # input LDR file path
#main(input_file)