fix: IS 4.5.0 -> 5.0.0 migration — USD metadata, DomeLight, scene reuse
- Fix USD metersPerUnit/upAxis for IS 5.0.0 (no longer auto-compensated) - Batch fix all Aligned_obj.usd, table, and art USD files with backups - Fix DomeLight rotation to Z-axis only (prevent tilted environment map) - Fix scene reuse across episodes (arena_file caching, task clearing, prim guard) - Add migration tools: scan_usd_metadata.py, fix_usd_metadata.py - Add migration guide: migerate/migerate.md - Add nvidia-curobo to .gitignore - Fix sort_the_rubbish config: obj_0 -> obj_1 (obj_0 does not exist) Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -18,4 +18,5 @@ _isaac_sim_410
|
||||
InterDataEngine-docs
|
||||
debug.sh
|
||||
debug.yaml
|
||||
depre
|
||||
depre
|
||||
workflows/simbox/src/nvidia-curobo/
|
||||
116
1.py
Normal file
116
1.py
Normal file
@@ -0,0 +1,116 @@
|
||||
# from pxr import Usd, UsdGeom
|
||||
# stage = Usd.Stage.Open('workflows/simbox/example_assets/task/sort_the_rubbish/recyclable_garbage/bottle_0/Aligned_obj.usd')
|
||||
# print('metersPerUnit:', UsdGeom.GetStageMetersPerUnit(stage))
|
||||
# print('upAxis:', UsdGeom.GetStageUpAxis(stage))
|
||||
# dp = stage.GetDefaultPrim()
|
||||
# print('defaultPrim:', dp.GetPath() if dp else 'None')
|
||||
# xf = UsdGeom.Xformable(dp)
|
||||
# for op in xf.GetOrderedXformOps():
|
||||
# print(f' xformOp: {op.GetName()} = {op.Get()}')
|
||||
# for child in dp.GetChildren():
|
||||
# cxf = UsdGeom.Xformable(child)
|
||||
# ops = cxf.GetOrderedXformOps()
|
||||
# if ops:
|
||||
# for op in ops:
|
||||
# print(f' child {child.GetPath()} xformOp: {op.GetName()} = {op.Get()}')
|
||||
|
||||
# from pxr import Usd, UsdGeom
|
||||
# stage = Usd.Stage.Open('workflows/simbox/example_assets/task/sort_the_rubbish/recyclable_garbage/bottle_0/Aligned_obj.usd')
|
||||
# print('metersPerUnit:', UsdGeom.GetStageMetersPerUnit(stage))
|
||||
# print('upAxis:', UsdGeom.GetStageUpAxis(stage))
|
||||
# for prim in stage.Traverse():
|
||||
# print(f' {prim.GetPath()} type={prim.GetTypeName()}')
|
||||
# xf = UsdGeom.Xformable(prim)
|
||||
# if xf:
|
||||
# for op in xf.GetOrderedXformOps():
|
||||
# print(f' {op.GetName()} = {op.Get()}')
|
||||
|
||||
# Debug/migration script for the IS 4.5.0 -> 5.0.0 USD metadata issue:
# 1) report metersPerUnit/upAxis for every object USD referenced by the
#    sort_the_rubbish task config,
# 2) dump mesh vertex ranges for two sample assets,
# 3) rewrite metersPerUnit=1.0 / upAxis=Z in-place (with .bak backups).
from pxr import Usd, UsdGeom
import yaml, os

# Load the task config to discover which USD assets the task references.
with open('workflows/simbox/core/configs/tasks/example/sort_the_rubbish.yaml') as f:
    cfg = yaml.safe_load(f)

# The config may be a single task dict or a {'tasks': [...]} wrapper.
tasks = cfg.get('tasks', [cfg])
for task in tasks:
    asset_root = task.get('asset_root', '')
    for obj in task.get('objects', []):
        name = obj['name']
        path = obj.get('path', '')
        full = os.path.join(asset_root, path)
        print(f'=== {name}: {full} ===')
        try:
            stage = Usd.Stage.Open(full)
            print(f' metersPerUnit: {UsdGeom.GetStageMetersPerUnit(stage)}')
            print(f' upAxis: {UsdGeom.GetStageUpAxis(stage)}')
        except Exception as e:
            print(f' ERROR: {e}')

# ===== Check mesh vertex ranges =====
print("\n===== Mesh vertex ranges =====")
import numpy as np
usd_files = {
    'pick_object_right': 'workflows/simbox/example_assets/task/sort_the_rubbish/recyclable_garbage/bottle_0/Aligned_obj.usd',
    'gso_box_right': 'workflows/simbox/example_assets/task/sort_the_rubbish/garbage_can/recyclable_can/Aligned_obj.usd',
}
for name, path in usd_files.items():
    stage = Usd.Stage.Open(path)
    for prim in stage.Traverse():
        if prim.GetTypeName() == 'Mesh':
            mesh = UsdGeom.Mesh(prim)
            pts = mesh.GetPointsAttr().Get()
            if pts:
                pts = np.array(pts)
                print(f'{name} mesh={prim.GetPath()} npts={len(pts)}')
                print(f' X: [{pts[:,0].min():.4f}, {pts[:,0].max():.4f}]')
                print(f' Y: [{pts[:,1].min():.4f}, {pts[:,1].max():.4f}] (upAxis=Y, this is height)')
                print(f' Z: [{pts[:,2].min():.4f}, {pts[:,2].max():.4f}]')
                print(f' Y_max * 0.01 = {pts[:,1].max()*0.01:.6f} m')
                print(f' Y_extent = {pts[:,1].max()-pts[:,1].min():.4f}')
                print(f' Y_max (raw cm) = {pts[:,1].max():.4f}')
                # 109.75 / Y_max ? -- 109.75 was the runaway Z seen in-sim;
                # check whether it is a scale artifact of Y_max (raw vs cm).
                if pts[:,1].max() > 0:
                    print(f' 109.75 / Y_max = {109.75 / pts[:,1].max():.4f}')
                    print(f' 109.75 / (Y_max*0.01) = {109.75 / (pts[:,1].max()*0.01):.4f}')

# ===== Fix USD metadata: set metersPerUnit=1.0 and upAxis=Z =====
print("\n===== Fixing USD metadata =====")
import shutil

fix_files = [
    'workflows/simbox/example_assets/task/sort_the_rubbish/recyclable_garbage/bottle_0/Aligned_obj.usd',
    'workflows/simbox/example_assets/task/sort_the_rubbish/garbage_can/recyclable_can/Aligned_obj.usd',
    'workflows/simbox/example_assets/task/sort_the_rubbish/garbage_can/nonrecyclable_can/Aligned_obj.usd',
]
# Also try to find pick_object_left USD
pick_left_path = 'workflows/simbox/example_assets/task/sort_the_rubbish/non_recyclable_garbage/obj_0/Aligned_obj.usd'
if os.path.exists(pick_left_path):
    fix_files.append(pick_left_path)

for fpath in fix_files:
    if not os.path.exists(fpath):
        print(f' SKIP (not found): {fpath}')
        continue
    # Backup (only once; an existing .bak is never overwritten, so the
    # oldest pristine copy is always the one kept)
    bak = fpath + '.bak'
    if not os.path.exists(bak):
        shutil.copy2(fpath, bak)
        print(f' Backed up: {fpath} -> {bak}')
    else:
        print(f' Backup already exists: {bak}')

    stage = Usd.Stage.Open(fpath)
    old_mpu = UsdGeom.GetStageMetersPerUnit(stage)
    old_up = UsdGeom.GetStageUpAxis(stage)
    print(f' {fpath}: old metersPerUnit={old_mpu}, upAxis={old_up}')

    # IS 5.0.0 no longer auto-compensates for mpu/upAxis; rewrite the stage
    # metadata itself and leave the vertex data untouched.
    UsdGeom.SetStageMetersPerUnit(stage, 1.0)
    UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.z)
    stage.GetRootLayer().Save()

    # Verify by re-opening the stage from disk.
    stage2 = Usd.Stage.Open(fpath)
    print(f' -> new metersPerUnit={UsdGeom.GetStageMetersPerUnit(stage2)}, upAxis={UsdGeom.GetStageUpAxis(stage2)}')

print("\nDone! Now run launcher to test if Z is normal.")
print("To restore: for each .bak file, copy it back over the .usd file")
|
||||
@@ -10,7 +10,7 @@ load_stage:
|
||||
rendering_dt: 1/30 # Render update rate
|
||||
stage_units_in_meters: 1.0 # Stage unit scale
|
||||
headless: True # Headless mode (no GUI); set false for visual debugging
|
||||
renderer: "PathTracing" # PathTracing: higher quality, less noise on Blackwell
|
||||
renderer: "RayTracedLighting" # Fast renderer. For higher quality / less noise on Blackwell, switch back to "PathTracing".
|
||||
anti_aliasing: 0 # Anti-aliasing level
|
||||
layout_random_generator: # Scene randomization
|
||||
type: env_randomizer
|
||||
|
||||
33
install.md
Normal file
33
install.md
Normal file
@@ -0,0 +1,33 @@
|
||||
conda create -n banana450 python=3.10
|
||||
conda activate banana450
|
||||
# 看看12.8安装了么?
|
||||
conda list | grep cuda
|
||||
# 安装12.8
|
||||
conda install -y cuda-toolkit=12.8
|
||||
# 其他的cuda版本在conda中的需要卸载
|
||||
conda list | grep cuda
|
||||
source ~/isaacsim450/setup_conda_env.sh
|
||||
# 导入conda环境的12.8cuda环境
|
||||
export CUDA_HOME="$CONDA_PREFIX"
|
||||
export PATH="$CONDA_PREFIX/bin:$PATH"
|
||||
# 看一下应该是12.8
|
||||
nvcc -V
|
||||
# 换掉torch版本,换成cu128编译的版本
|
||||
pip list | grep torch
|
||||
pip install torch==2.7.0 torchvision==0.22.0 torchaudio==2.7.0 --index-url https://download.pytorch.org/whl/cu128
|
||||
pip list | grep torch
|
||||
pip install -r requirements.txt
|
||||
pip list | grep cuda
|
||||
# 卸载掉cu11的那些包
|
||||
pip uninstall nvidia-cuda-cupti-cu11 nvidia-cuda-nvrtc-cu11 nvidia-cuda-runtime-cu11
|
||||
|
||||
# 安装curobo
|
||||
cd workflows/simbox/curobo
|
||||
# 12.0是显卡的算力 要自己去查 不要信AI的 PRO6000是12.0
|
||||
export TORCH_CUDA_ARCH_LIST="12.0+PTX"
|
||||
pip install -e .[isaacsim] --no-build-isolation
|
||||
|
||||
# 降级numpy
|
||||
pip install numpy==1.26.0
|
||||
pip install opencv-python==4.11.0.86
|
||||
python launcher.py --config configs/simbox/de_plan_and_render_template.yaml
|
||||
159
migerate/fix_usd_metadata.py
Normal file
159
migerate/fix_usd_metadata.py
Normal file
@@ -0,0 +1,159 @@
|
||||
"""
|
||||
Batch fix USD metadata for IS 5.0.0 compatibility.
|
||||
|
||||
IS 4.5.0 auto-added 'Rotate:unitsResolve' and 'Scale:unitsResolve' xformOps
|
||||
to compensate for metersPerUnit/upAxis differences. IS 5.0.0 no longer does this,
|
||||
causing RigidObject physics to break (objects fly away).
|
||||
|
||||
Fix: Change metersPerUnit to 1.0 and upAxis to Z in USD metadata.
|
||||
Vertex data stays unchanged (already in correct scale for the scene).
|
||||
|
||||
Usage:
|
||||
# Scan only (dry run):
|
||||
python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets --dry-run
|
||||
|
||||
# Fix all:
|
||||
python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets
|
||||
|
||||
# Fix specific pattern:
|
||||
python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets --pattern "Aligned_obj.usd"
|
||||
|
||||
# Skip specific directories (e.g. robot models, curobo assets):
|
||||
python migerate/fix_usd_metadata.py --root . --skip curobo,robot
|
||||
|
||||
# Restore from backups:
|
||||
python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets --restore
|
||||
"""
|
||||
import argparse
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
|
||||
from pxr import Usd, UsdGeom
|
||||
|
||||
|
||||
def fix_usd_files(root: str, pattern: str = "*.usd", target_mpu: float = 1.0,
                  target_up: str = "Z", dry_run: bool = False, skip: list = None):
    """Scan USD files under *root* and rewrite metersPerUnit/upAxis metadata.

    Files already matching (target_mpu, target_up) are left alone; every file
    that is rewritten gets a one-time ``.bak`` backup next to it. With
    ``dry_run`` nothing is modified and would-be fixes are only reported.
    ``skip`` is a list of path substrings to exclude. Returns the number of
    files fixed (or that would be fixed in a dry run).
    """
    skip = skip or []
    candidates = sorted(
        f for f in glob.glob(os.path.join(root, "**", pattern), recursive=True)
        if not f.endswith(".bak")
    )

    print(f"Scanning {len(candidates)} USD files under: {root}")
    if dry_run:
        print("[DRY RUN] No files will be modified.\n")
    print()

    # Outcome counters: fixed / already OK / filtered out / errored.
    counts = {"fixed": 0, "ok": 0, "dir": 0, "err": 0}

    for fpath in candidates:
        rel = os.path.relpath(fpath, root)

        # Honor the --skip substring filters first.
        if any(token in rel for token in skip):
            print(f" SKIP (dir filter): {rel}")
            counts["dir"] += 1
            continue

        try:
            stage = Usd.Stage.Open(fpath)
            if stage is None:
                print(f" ERROR: Cannot open {rel}")
                counts["err"] += 1
                continue

            mpu = UsdGeom.GetStageMetersPerUnit(stage)
            up = UsdGeom.GetStageUpAxis(stage)

            if mpu == target_mpu and up == target_up:
                print(f" OK: {rel} (mpu={mpu}, up={up})")
                counts["ok"] += 1
                continue

            if dry_run:
                print(f" WOULD FIX: {rel} (mpu={mpu}, up={up} -> mpu={target_mpu}, up={target_up})")
                counts["fixed"] += 1
                continue

            # One-time backup: never clobber an existing .bak.
            bak = fpath + ".bak"
            if not os.path.exists(bak):
                shutil.copy2(fpath, bak)

            # Rewrite the stage-level metadata and persist to disk.
            UsdGeom.SetStageMetersPerUnit(stage, target_mpu)
            UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.z if target_up == "Z" else UsdGeom.Tokens.y)
            stage.GetRootLayer().Save()

            print(f" FIXED: {rel} (mpu={mpu}/{up} -> {target_mpu}/{target_up})")
            counts["fixed"] += 1

        except Exception as e:
            print(f" ERROR: {rel} -> {e}")
            counts["err"] += 1

    print(f"\nDone: {counts['fixed']} {'would fix' if dry_run else 'fixed'}, {counts['ok']} already OK, "
          f"{counts['dir']} skipped (dir filter), {counts['err']} errors.")

    if not dry_run and counts["fixed"] > 0:
        print("Backups saved as *.bak.")
        print("To restore: python migerate/fix_usd_metadata.py --root <DIR> --restore")

    return counts["fixed"]
|
||||
|
||||
|
||||
def restore_backups(root: str, pattern: str = "*.usd"):
    """Restore every '<pattern>.bak' backup under *root* over its original file."""
    backups = sorted(glob.glob(os.path.join(root, "**", pattern + ".bak"), recursive=True))

    if not backups:
        print(f"No .bak files found under: {root}")
        return

    print(f"Found {len(backups)} backup files. Restoring...")
    for bak_path in backups:
        # Strip the trailing '.bak' to recover the original filename.
        target = bak_path[: -len(".bak")]
        shutil.copy2(bak_path, target)
        print(f" Restored: {os.path.relpath(target, root)}")
    print(f"\nDone: {len(backups)} files restored.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI wiring: either restore .bak backups (--restore) or scan/fix metadata.
    parser = argparse.ArgumentParser(
        description="Batch fix USD metersPerUnit/upAxis metadata for IS 5.0.0 compatibility",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Dry run on example assets:
  python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets --dry-run

  # Fix all Aligned_obj.usd in example assets:
  python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets --pattern "Aligned_obj.usd"

  # Fix everything except robot/curobo dirs:
  python migerate/fix_usd_metadata.py --root . --skip curobo,robot

  # Restore all backups:
  python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets --restore
""")
    parser.add_argument("--root", default="workflows/simbox/example_assets",
                        help="Root directory to scan (default: workflows/simbox/example_assets)")
    parser.add_argument("--pattern", default="*.usd", help="Filename glob pattern (default: *.usd)")
    parser.add_argument("--target-mpu", type=float, default=1.0, help="Target metersPerUnit (default: 1.0)")
    parser.add_argument("--target-up", choices=["Y", "Z"], default="Z", help="Target upAxis (default: Z)")
    parser.add_argument("--dry-run", action="store_true", help="Scan and report only, do not modify files")
    parser.add_argument("--skip", default="", help="Comma-separated dir substrings to skip (e.g. 'curobo,robot')")
    parser.add_argument("--restore", action="store_true", help="Restore all .bak files to originals")
    args = parser.parse_args()

    if args.restore:
        # Undo mode: copy every <file>.bak back over <file>.
        restore_backups(args.root, args.pattern)
    else:
        # Turn the comma-separated --skip string into a list of substrings.
        skip_list = [s.strip() for s in args.skip.split(",") if s.strip()]
        fix_usd_files(args.root, args.pattern, args.target_mpu, args.target_up, args.dry_run, skip_list)
|
||||
199
migerate/migerate.md
Normal file
199
migerate/migerate.md
Normal file
@@ -0,0 +1,199 @@
|
||||
# Isaac Sim 4.5.0 -> 5.0.0 Migration Guide
|
||||
|
||||
## Background
|
||||
|
||||
- GPU: NVIDIA RTX PRO 6000 Blackwell (SM_120, compute capability 12.0)
|
||||
- IS 4.5.0 = Kit 106.5.0, DLSS Ray Reconstruction not supported on Blackwell (needs Kit >= 106.5.3), rendering has noise
|
||||
- IS 5.0.0 = Kit 107.x, fixes Blackwell rendering noise
|
||||
- IS 5.1.0 headless camera rendering completely broken (known bug GitHub #3250), abandoned
|
||||
|
||||
## Issues & Solutions
|
||||
|
||||
### 1. SimulationApp import path changed
|
||||
|
||||
**Error:** `ModuleNotFoundError: No module named 'omni.isaac.kit'`
|
||||
|
||||
**Cause:** IS 5.x changed the import path.
|
||||
|
||||
**Fix** (`nimbus_extension/components/load/env_loader.py`):
|
||||
```python
|
||||
try:
|
||||
from isaacsim import SimulationApp # IS 5.x
|
||||
except ImportError:
|
||||
from omni.isaac.kit import SimulationApp # IS 4.x
|
||||
```
|
||||
|
||||
### 2. USD metersPerUnit / upAxis not auto-compensated
|
||||
|
||||
**Error:** RigidObject Z coordinate flying to 109.75+ after `world.reset()`, curobo `plan_single()` fails with "Plan did not converge" (goal_pos Z = 25261 meters).
|
||||
|
||||
**Cause:** IS 4.5.0 auto-added `Rotate:unitsResolve` (X=90) and `Scale:unitsResolve` (0.01, 0.01, 0.01) xformOps to compensate for USD files with `metersPerUnit=0.01, upAxis=Y`. IS 5.0.0 no longer does this, causing PhysX to misinterpret RigidObject positions.
|
||||
|
||||
GeometryObjects (no physics) were unaffected — only RigidObjects with PhysX simulation had the issue.
|
||||
|
||||
**Fix:** Change USD metadata directly using pxr scripting. Run `fix_usd_metadata.py`:
|
||||
```python
|
||||
from pxr import Usd, UsdGeom
|
||||
stage = Usd.Stage.Open(usd_path)
|
||||
UsdGeom.SetStageMetersPerUnit(stage, 1.0) # was 0.01
|
||||
UsdGeom.SetStageUpAxis(stage, UsdGeom.Tokens.z) # was Y
|
||||
stage.GetRootLayer().Save()
|
||||
```
|
||||
|
||||
All 14 `Aligned_obj.usd` files under `workflows/simbox/example_assets` have been batch-fixed. Backups saved as `.bak`.
|
||||
|
||||
**Verification:** After fix, pick_object_right Z went from 109.75 to 0.7968 (normal).
|
||||
|
||||
### 3. scipy `scalar_first` parameter not supported
|
||||
|
||||
**Error:** `Rotation.as_quat() takes no keyword arguments`
|
||||
|
||||
**Cause:** IS 5.0.0 ships scipy < 1.11 which doesn't support `scalar_first` parameter in `Rotation.from_quat()` / `Rotation.as_quat()`. Dozens of files use this parameter.
|
||||
|
||||
**Fix** (`launcher.py`): Monkey-patch via subclass at startup:
|
||||
```python
|
||||
try:
|
||||
from scipy.spatial.transform import Rotation as _R
|
||||
_R.from_quat([1, 0, 0, 0], scalar_first=True)
|
||||
except TypeError:
|
||||
# Install Rotation subclass that handles scalar_first
|
||||
# See launcher.py for full implementation
|
||||
pass
|
||||
```
|
||||
|
||||
### 4. `arena_file_path` is None on second reset
|
||||
|
||||
**Error:** `arena_file_path` becomes None after first episode.
|
||||
|
||||
**Cause:** `self.task_cfg.pop("arena_file", None)` on line 107 of `simbox_dual_workflow.py` deletes the key after first use.
|
||||
|
||||
**Fix** (`workflows/simbox_dual_workflow.py`): Cache the value:
|
||||
```python
|
||||
self._arena_file_path = None # in __init__
|
||||
|
||||
# in reset():
|
||||
arena_file_path = self.task_cfg.get("arena_file", None) or self._arena_file_path
|
||||
if arena_file_path:
|
||||
self._arena_file_path = arena_file_path
|
||||
```
|
||||
|
||||
### 5. "Task name should be unique in the world"
|
||||
|
||||
**Error:** `AssertionError` when calling `world.add_task()` on second episode.
|
||||
|
||||
**Cause:** World retains tasks from previous episode.
|
||||
|
||||
**Fix** (`workflows/simbox_dual_workflow.py`):
|
||||
```python
|
||||
self.world._current_tasks.clear()
|
||||
self.world.add_task(self.task)
|
||||
```
|
||||
|
||||
### 6. "A prim already exists at prim path"
|
||||
|
||||
**Error:** Scene setup tries to re-create existing prims on repeated `world.reset()` calls.
|
||||
|
||||
**Fix** (`workflows/simbox/core/tasks/banana.py`): Add `_scene_initialized` guard:
|
||||
```python
|
||||
def set_up_scene(self, scene):
|
||||
if getattr(self, '_scene_initialized', False):
|
||||
self._task_objects = {}
|
||||
self._task_objects |= (
|
||||
getattr(self, 'fixtures', {}) |
|
||||
getattr(self, 'objects', {}) |
|
||||
getattr(self, 'robots', {}) |
|
||||
getattr(self, 'cameras', {})
|
||||
)
|
||||
return
|
||||
self._scene_initialized = True
|
||||
super().set_up_scene(scene)
|
||||
```
|
||||
|
||||
### 7. Collision group prim already exists
|
||||
|
||||
**Error:** Collision group prims persist across resets.
|
||||
|
||||
**Fix** (`workflows/simbox/core/utils/collision_utils.py`): Remove existing collision prims before re-creating:
|
||||
```python
|
||||
collision_prim = stage.GetPrimAtPath(collision_root_path)
|
||||
if collision_prim.IsValid():
|
||||
stage.RemovePrim(collision_root_path)
|
||||
```
|
||||
|
||||
### 8. DomeLight environment map tilted
|
||||
|
||||
**Error:** HDR environment map appears rotated/tilted in viewport and rendered output.
|
||||
|
||||
**Cause:** DomeLight rotation was randomized on all 3 axes (X, Y, Z). Rotating X/Y tilts the environment.
|
||||
|
||||
**Fix** (`workflows/simbox/core/tasks/banana.py`): Only rotate Z axis:
|
||||
```python
|
||||
# Before:
|
||||
rotation = [random.uniform(r[0], r[1]) for _ in range(3)]
|
||||
# After:
|
||||
rotation = [0.0, 0.0, random.uniform(r[0], r[1])]
|
||||
```
|
||||
|
||||
## Files Modified
|
||||
|
||||
| File | Change |
|
||||
|------|--------|
|
||||
| `nimbus_extension/components/load/env_loader.py` | SimulationApp import compatibility |
|
||||
| `launcher.py` | scipy Rotation monkey-patch |
|
||||
| `workflows/simbox_dual_workflow.py` | arena_file caching, task clearing, task reuse |
|
||||
| `workflows/simbox/core/tasks/banana.py` | Scene re-init guard, DomeLight rotation fix |
|
||||
| `workflows/simbox/core/utils/collision_utils.py` | Collision group cleanup |
|
||||
| `workflows/simbox/example_assets/**/Aligned_obj.usd` | metersPerUnit=1.0, upAxis=Z |
|
||||
| `migerate/fix_usd_metadata.py` | Batch USD metadata fix script |
|
||||
|
||||
## Tools
|
||||
|
||||
Migration tools are located in the `migerate/` directory.
|
||||
|
||||
### scan_usd_metadata.py — Scan USD metadata
|
||||
|
||||
Scan all USD files and report their `metersPerUnit` / `upAxis`:
|
||||
|
||||
```bash
|
||||
conda activate banana500
|
||||
|
||||
# Scan entire project
|
||||
python migerate/scan_usd_metadata.py --root .
|
||||
|
||||
# Scan specific directory
|
||||
python migerate/scan_usd_metadata.py --root workflows/simbox/example_assets
|
||||
```
|
||||
|
||||
Exit code: 0 = all OK, 1 = found files with non-standard metadata.
|
||||
Tip: after scanning, review the report (e.g. with an LLM or a teammate) before deciding on the next step.
|
||||
### fix_usd_metadata.py — Batch fix USD metadata
|
||||
|
||||
Fix `metersPerUnit` and `upAxis` in USD files, with backup/restore support:
|
||||
|
||||
```bash
|
||||
conda activate banana500
|
||||
|
||||
# Dry run — see what would be fixed, no changes made
|
||||
python migerate/fix_usd_metadata.py --root . --dry-run --skip curobo,robot
|
||||
|
||||
# Fix example assets only (default)
|
||||
python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets
|
||||
|
||||
# Fix all USD in project, skip robot/curobo models
|
||||
python migerate/fix_usd_metadata.py --root . --skip curobo,robot
|
||||
|
||||
# Fix only Aligned_obj.usd files
|
||||
python migerate/fix_usd_metadata.py --root . --pattern "Aligned_obj.usd"
|
||||
|
||||
# Restore from backups (undo all fixes)
|
||||
python migerate/fix_usd_metadata.py --root workflows/simbox/example_assets --restore
|
||||
```
|
||||
|
||||
**Important:** Do NOT fix robot USD (curobo, split_aloha) — their metersPerUnit/upAxis are intentionally set for their own coordinate systems. Use `--skip` to exclude them.
|
||||
|
||||
## Notes
|
||||
|
||||
- `plan_single()` occasionally returns None for edge-case target poses (workspace boundary). This is normal and happens in IS 4.5.0 too.
|
||||
- Example HDR is only 1k resolution — production should use 4k/8k for better background quality.
|
||||
- Example `envmap_lib` only has 1 HDR file — add more for randomization diversity.
|
||||
- `non_recyclable_garbage/obj_0` does not exist in example assets (only obj_1~obj_11). Config changed to use `obj_1`.
|
||||
70
migerate/scan_usd_metadata.py
Normal file
70
migerate/scan_usd_metadata.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""
|
||||
Scan all USD files in the project and report their metersPerUnit / upAxis metadata.
|
||||
|
||||
Usage:
|
||||
python migerate/scan_usd_metadata.py [--root DIR]
|
||||
|
||||
Default root: current working directory (project root).
|
||||
"""
|
||||
import argparse
|
||||
import glob
|
||||
import os
|
||||
import sys
|
||||
|
||||
from pxr import Usd, UsdGeom
|
||||
|
||||
|
||||
def scan(root: str, target_mpu: float = 1.0, target_up: str = "Z"):
    """Scan all USD files under *root* and report metersPerUnit/upAxis.

    Prints a summary plus a detailed list of files whose metadata deviates
    from (target_mpu, target_up) and of files that failed to open. Returns
    the list of deviating files as (relative_path, mpu, upAxis) tuples.
    """
    paths = sorted(
        p for p in glob.glob(os.path.join(root, "**", "*.usd"), recursive=True)
        if not p.endswith(".bak")
    )

    ok_files, diff_files, error_files = [], [], []

    for fpath in paths:
        try:
            stage = Usd.Stage.Open(fpath)
            if stage is None:
                error_files.append((fpath, "Cannot open stage"))
                continue
            mpu = UsdGeom.GetStageMetersPerUnit(stage)
            up = UsdGeom.GetStageUpAxis(stage)
            rel = os.path.relpath(fpath, root)
            # Route into the matching bucket: conforming vs deviating.
            bucket = ok_files if (mpu == target_mpu and up == target_up) else diff_files
            bucket.append((rel, mpu, up))
        except Exception as e:
            error_files.append((fpath, str(e)))

    # Summary report.
    total = len(ok_files) + len(diff_files) + len(error_files)
    print(f"Scanned {total} USD files under: {root}")
    print(f" OK (mpu={target_mpu}, up={target_up}): {len(ok_files)}")
    print(f" Different: {len(diff_files)}")
    print(f" Errors: {len(error_files)}")

    if diff_files:
        print(f"\n{'='*80}")
        print(f"Files with non-standard metadata (mpu != {target_mpu} or up != {target_up}):")
        print(f"{'='*80}")
        for rel, mpu, up in diff_files:
            print(f" mpu={mpu:<8} up={up:<4} {rel}")

    if error_files:
        print(f"\n{'='*80}")
        print("Files with errors:")
        print(f"{'='*80}")
        for fpath, err in error_files:
            print(f" ERROR: {fpath} -> {err}")

    return diff_files
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # CLI entry: scan from --root (default: cwd).
    parser = argparse.ArgumentParser(description="Scan USD files for metersPerUnit/upAxis metadata")
    parser.add_argument("--root", default=os.getcwd(), help="Root directory to scan (default: cwd)")
    args = parser.parse_args()
    diff = scan(args.root)
    # Exit code: 0 = all OK, 1 = non-standard metadata found. Note that
    # open errors alone do not affect the exit status.
    sys.exit(1 if diff else 0)
|
||||
168
scripts/download_assets_direct.py
Normal file
168
scripts/download_assets_direct.py
Normal file
@@ -0,0 +1,168 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Direct HuggingFace dataset downloader that avoids full repo enumeration.
|
||||
Uses the HF tree API per-directory, then downloads files via HTTP.
|
||||
|
||||
Usage:
|
||||
python scripts/download_assets_direct.py [OPTIONS]
|
||||
|
||||
Options:
|
||||
--min Download only required scene assets
|
||||
--full Download all scene assets (default)
|
||||
--with-curobo Also download CuRobo package
|
||||
--with-drake Also download panda_drake package
|
||||
--local-dir DIR Where to save (default: current directory)
|
||||
--mirror URL HF mirror base URL (default: https://hf-mirror.com)
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
import time
|
||||
from pathlib import Path
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
import requests
|
||||
|
||||
REPO_ID = "InternRobotics/InternData-A1"
|
||||
ASSET_PREFIX = "InternDataAssets"
|
||||
|
||||
def get_token():
    """Return a HuggingFace API token.

    Prefers the HF_TOKEN environment variable; falls back to the token file
    written by `huggingface-cli login`. Returns '' when neither is present.
    """
    env_token = os.environ.get("HF_TOKEN", "")
    if env_token:
        return env_token
    cached = Path.home() / ".cache/huggingface/token"
    if cached.exists():
        return cached.read_text().strip()
    return env_token
|
||||
|
||||
def list_dir(api_base, repo_id, path, token, session):
    """List all file paths under *path* of the dataset repo via the HF tree API.

    Follows pagination via ``nextCursor`` until exhausted and returns only
    entries of type 'file' (directories are skipped).

    NOTE(review): the request sets ``recursive=true``, so this IS a recursive
    listing despite the original comment claiming otherwise — callers pass
    top-level asset directories and expect all nested files back.
    """
    url = f"{api_base}/api/datasets/{repo_id}/tree/main/{path}"
    headers = {"Authorization": f"Bearer {token}"} if token else {}
    files = []
    cursor = None
    while True:
        # 1000 per page; the cursor (when present) resumes the listing.
        params = {"expand": "false", "recursive": "true", "limit": "1000"}
        if cursor:
            params["cursor"] = cursor
        r = session.get(url, headers=headers, params=params, timeout=30)
        r.raise_for_status()
        data = r.json()
        if isinstance(data, list):
            # Bare-list response: single page, nothing to paginate.
            items = data
            cursor = None
        else:
            items = data.get("items", [])
            cursor = data.get("nextCursor")
        for item in items:
            if item.get("type") == "file":
                files.append(item["path"])
        # Stop when there is no next page (or the page came back empty).
        if not cursor or not items:
            break
    return files
|
||||
|
||||
def download_file(mirror_base, repo_id, file_path, local_dir, token, session):
    """Download a single repo file to *local_dir*, preserving its relative path.

    Returns ``(file_path, status)`` where status is 'skipped' (already
    present), 'ok', or 'error: <msg>'. Retries up to 3 times with
    exponential backoff (1s, 2s).

    Fix: stream into a temporary '<name>.part' file and atomically rename it
    into place on success. The original wrote straight to the final path, so
    an interrupted download left a truncated file that subsequent runs would
    wrongly report as 'skipped'.
    """
    url = f"{mirror_base}/datasets/{repo_id}/resolve/main/{file_path}"
    headers = {"Authorization": f"Bearer {token}"} if token else {}
    local_path = Path(local_dir) / file_path
    local_path.parent.mkdir(parents=True, exist_ok=True)
    if local_path.exists():
        return file_path, "skipped"
    tmp_path = local_path.with_name(local_path.name + ".part")
    for attempt in range(3):
        try:
            with session.get(url, headers=headers, stream=True, timeout=60) as r:
                r.raise_for_status()
                with open(tmp_path, "wb") as f:
                    for chunk in r.iter_content(chunk_size=1024 * 1024):
                        f.write(chunk)
            # Atomic publish: the final path only ever holds a complete file.
            os.replace(tmp_path, local_path)
            return file_path, "ok"
        except Exception as e:
            if attempt == 2:
                # Give up: remove the partial so a rerun retries the file.
                try:
                    tmp_path.unlink()
                except OSError:
                    pass
                return file_path, f"error: {e}"
            time.sleep(2 ** attempt)
|
||||
|
||||
def download_dir(api_base, mirror_base, repo_id, path, local_dir, token, label, workers=8):
    """Download every file under *path* of the dataset repo, in parallel.

    Lists the directory once via the tree API, then fans the downloads out
    over a thread pool. Failures are collected and summarized at the end;
    a listing failure aborts this directory only (the caller continues).
    """
    print(f"[INFO] Listing {label} ...")
    session = requests.Session()
    try:
        files = list_dir(api_base, repo_id, path, token, session)
    except Exception as e:
        print(f"[ERROR] Failed to list {path}: {e}")
        return
    print(f"[INFO] Downloading {label} ({len(files)} files) ...")
    errors = []
    done = 0
    with ThreadPoolExecutor(max_workers=workers) as pool:
        # NOTE(review): one requests.Session is shared across all worker
        # threads; requests does not formally document Session thread-safety
        # — confirm acceptable for this workload.
        futures = {pool.submit(download_file, mirror_base, repo_id, f, local_dir, token, session): f for f in files}
        for fut in as_completed(futures):
            fp, status = fut.result()
            done += 1
            if "error" in status:
                errors.append((fp, status))
                print(f" [{done}/{len(files)}] FAIL {fp}: {status}")
            else:
                # '\r' keeps ok/skip progress on one line; failures get their own.
                print(f" [{done}/{len(files)}] {status.upper()} {fp}", end="\r")
    print()
    if errors:
        print(f"[WARN] {len(errors)} files failed in {label}")
|
||||
|
||||
def download_file_single(api_base, mirror_base, repo_id, file_path, local_dir, token, label):
    """Download exactly one repo file (e.g. table_info.json) and report the outcome."""
    print(f"[INFO] Downloading {label} ...")
    http = requests.Session()
    result_path, result_status = download_file(mirror_base, repo_id, file_path, local_dir, token, http)
    if "error" in result_status:
        print(f"[ERROR] {result_path}: {result_status}")
    else:
        print(f"[INFO] {label} done.")
|
||||
|
||||
def main():
    """CLI entry: download scene assets (plus optional CuRobo / panda_drake)."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--min", action="store_true")
    parser.add_argument("--full", action="store_true")
    parser.add_argument("--with-curobo", action="store_true")
    parser.add_argument("--with-drake", action="store_true")
    parser.add_argument("--local-dir", default=".")
    parser.add_argument("--mirror", default="https://hf-mirror.com")
    parser.add_argument("--workers", type=int, default=8)
    args = parser.parse_args()

    # --min wins; otherwise run in full mode (even without an explicit --full).
    mode = "min" if args.min else "full"
    # The HF_ENDPOINT env var takes precedence over the --mirror flag.
    mirror = os.environ.get("HF_ENDPOINT", args.mirror).rstrip("/")
    api_base = mirror
    token = get_token()
    local_dir = args.local_dir

    if not token:
        print("[WARN] No HF token found. Gated datasets will fail.")

    print(f"[INFO] Mirror: {mirror}, mode={mode}, curobo={args.with_curobo}, drake={args.with_drake}")

    base = f"{ASSET_PREFIX}/assets"

    # Required scene assets (always downloaded, both min and full modes).
    print("[INFO] ========== Downloading required scene assets ==========")
    for d in ["background_textures", "envmap_lib", "floor_textures", "table_textures", "table0"]:
        download_dir(api_base, mirror, REPO_ID, f"{base}/{d}", local_dir, token, d, args.workers)
    download_file_single(api_base, mirror, REPO_ID, f"{base}/table_info.json", local_dir, token, "table_info.json")

    # Full mode extras: all robots and all task asset packs.
    if mode == "full":
        print("[INFO] ========== Downloading all robots and tasks ==========")
        for robot in ["lift2", "franka", "frankarobotiq", "split_aloha_mid_360", "G1_120s"]:
            download_dir(api_base, mirror, REPO_ID, f"{base}/{robot}", local_dir, token, f"robot:{robot}", args.workers)
        for task in ["basic", "art", "long_horizon", "pick_and_place"]:
            download_dir(api_base, mirror, REPO_ID, f"{base}/{task}", local_dir, token, f"task:{task}", args.workers)

    # Optional heavyweight extras, opt-in via flags.
    if args.with_curobo:
        print("[INFO] ========== Downloading CuRobo ==========")
        download_dir(api_base, mirror, REPO_ID, f"{ASSET_PREFIX}/curobo", local_dir, token, "curobo", args.workers)

    if args.with_drake:
        print("[INFO] ========== Downloading panda_drake ==========")
        download_dir(api_base, mirror, REPO_ID, f"{ASSET_PREFIX}/panda_drake", local_dir, token, "panda_drake", args.workers)

    print(f"[INFO] Done!")


if __name__ == "__main__":
    main()
|
||||
@@ -29,7 +29,7 @@ tasks:
|
||||
objects:
|
||||
-
|
||||
name: pick_object_left
|
||||
path: task/sort_the_rubbish/non_recyclable_garbage/obj_0/Aligned_obj.usd
|
||||
path: task/sort_the_rubbish/non_recyclable_garbage/obj_1/Aligned_obj.usd
|
||||
target_class: RigidObject
|
||||
dataset: oo3d
|
||||
category: bottle
|
||||
|
||||
@@ -249,6 +249,12 @@ class TemplateController(BaseController):
|
||||
obstacles = self.usd_help.get_obstacles_from_stage(
|
||||
ignore_substring=self.ignore_substring, reference_prim_path=self.reference_prim_path
|
||||
).get_collision_check_world()
|
||||
# Diagnostic: print what curobo sees as the collision world
|
||||
n_cuboid = len(obstacles.cuboid) if obstacles.cuboid else 0
|
||||
n_mesh = len(obstacles.mesh) if obstacles.mesh else 0
|
||||
mesh_names = [m.name for m in obstacles.mesh] if obstacles.mesh else []
|
||||
print(f"[CUROBO_WORLD] cuboids={n_cuboid}, meshes={n_mesh}, "
|
||||
f"mesh_names={mesh_names[:5]}{'...' if n_mesh > 5 else ''}", flush=True)
|
||||
if self.motion_gen is not None:
|
||||
self.motion_gen.update_world(obstacles)
|
||||
self.world_cfg = obstacles
|
||||
@@ -277,6 +283,10 @@ class TemplateController(BaseController):
|
||||
self.T_world_base_init = get_relative_transform(
|
||||
get_prim_at_path(self.robot_base_path), get_prim_at_path(self.task.root_prim_path)
|
||||
)
|
||||
print(f"[TRANSFORM_DBG] robot_base_path={self.robot_base_path}", flush=True)
|
||||
print(f"[TRANSFORM_DBG] root_prim_path={self.task.root_prim_path}", flush=True)
|
||||
print(f"[TRANSFORM_DBG] T_world_base_init translation={self.T_world_base_init[:3, 3]}", flush=True)
|
||||
print(f"[TRANSFORM_DBG] T_world_base_init full=\n{self.T_world_base_init}", flush=True)
|
||||
self.T_world_ee_init = self.T_world_base_init @ self.T_base_ee_init
|
||||
self._ee_trans, self._ee_ori = self.get_ee_pose()
|
||||
self._ee_trans = self.tensor_args.to_device(self._ee_trans)
|
||||
|
||||
@@ -47,6 +47,21 @@ class RigidObject(RigidPrim):
|
||||
self.base_prim_path = prim_path
|
||||
rigid_prim_path = os.path.join(self.base_prim_path, cfg["prim_path_child"])
|
||||
self.mesh_prim_path = str(get_prim_at_path(rigid_prim_path).GetChildren()[0].GetPrimPath())
|
||||
# [LOAD_DBG] Print xformOps right after create_prim, before physics
|
||||
try:
|
||||
from pxr import UsdGeom
|
||||
_xf = UsdGeom.Xformable(get_prim_at_path(rigid_prim_path))
|
||||
for _op in _xf.GetOrderedXformOps():
|
||||
print(f"[LOAD_DBG] {cfg_name} after_create_prim: {_op.GetName()} = {_op.Get()}", flush=True)
|
||||
_l2w = _xf.ComputeLocalToWorldTransform(0)
|
||||
print(f"[LOAD_DBG] {cfg_name} after_create_prim l2w=({_l2w[3][0]:.6f}, {_l2w[3][1]:.6f}, {_l2w[3][2]:.6f})", flush=True)
|
||||
# Also check the USD file's own metersPerUnit
|
||||
_ref_stage = get_prim_at_path(prim_path).GetStage()
|
||||
_mpu = UsdGeom.GetStageMetersPerUnit(_ref_stage)
|
||||
_up = UsdGeom.GetStageUpAxis(_ref_stage)
|
||||
print(f"[LOAD_DBG] {cfg_name} stage metersPerUnit={_mpu} upAxis={_up}", flush=True)
|
||||
except Exception as _e:
|
||||
print(f"[LOAD_DBG] {cfg_name} error: {_e}", flush=True)
|
||||
super().__init__(prim_path=rigid_prim_path, name=cfg["name"], *args, **kwargs)
|
||||
|
||||
def get_observations(self):
|
||||
|
||||
@@ -284,8 +284,49 @@ class Pick(BaseSkill):
|
||||
if frame == "body":
|
||||
return self.T_obj_ee
|
||||
|
||||
T_world_obj = tf_matrix_from_pose(*self.pick_obj.get_local_pose())
|
||||
local_pose = self.pick_obj.get_local_pose()
|
||||
world_pose = self.pick_obj.get_world_pose()
|
||||
print(f"[PICK_DBG] prim_path={self.pick_obj.prim_path}", flush=True)
|
||||
print(f"[PICK_DBG] local_pose trans={local_pose[0]}", flush=True)
|
||||
print(f"[PICK_DBG] world_pose trans={world_pose[0]}", flush=True)
|
||||
print(f"[PICK_DBG] local_pose ori={local_pose[1]}", flush=True)
|
||||
print(f"[PICK_DBG] world_pose ori={world_pose[1]}", flush=True)
|
||||
parent_prim = get_prim_at_path(self.pick_obj.prim_path).GetParent()
|
||||
print(f"[PICK_DBG] parent_prim_path={parent_prim.GetPrimPath()}", flush=True)
|
||||
grandparent_prim = parent_prim.GetParent()
|
||||
print(f"[PICK_DBG] grandparent_prim_path={grandparent_prim.GetPrimPath()}", flush=True)
|
||||
try:
|
||||
from pxr import UsdGeom
|
||||
obj_prim = get_prim_at_path(self.pick_obj.prim_path)
|
||||
obj_xf = UsdGeom.Xformable(obj_prim)
|
||||
xform_ops = obj_xf.GetOrderedXformOps()
|
||||
print(f"[PICK_DBG] obj_xformOps_count={len(xform_ops)}", flush=True)
|
||||
for op in xform_ops:
|
||||
print(f"[PICK_DBG] obj_xformOp: {op.GetName()} val={op.Get()}", flush=True)
|
||||
obj_l2w = obj_xf.ComputeLocalToWorldTransform(0)
|
||||
print(f"[PICK_DBG] obj_USD_l2w=({obj_l2w[3][0]:.6f}, {obj_l2w[3][1]:.6f}, {obj_l2w[3][2]:.6f})", flush=True)
|
||||
parent_xf = UsdGeom.Xformable(parent_prim)
|
||||
parent_l2w = parent_xf.ComputeLocalToWorldTransform(0)
|
||||
print(f"[PICK_DBG] parent_l2w_translate=({parent_l2w[3][0]}, {parent_l2w[3][1]}, {parent_l2w[3][2]})", flush=True)
|
||||
gp_xf = UsdGeom.Xformable(grandparent_prim)
|
||||
gp_l2w = gp_xf.ComputeLocalToWorldTransform(0)
|
||||
print(f"[PICK_DBG] grandparent_l2w_translate=({gp_l2w[3][0]}, {gp_l2w[3][1]}, {gp_l2w[3][2]})", flush=True)
|
||||
stage = parent_prim.GetStage()
|
||||
mpu = UsdGeom.GetStageMetersPerUnit(stage)
|
||||
print(f"[PICK_DBG] stage_metersPerUnit={mpu}", flush=True)
|
||||
# Check get_local_pose source
|
||||
glp_method = type(self.pick_obj).get_local_pose
|
||||
print(f"[PICK_DBG] get_local_pose_from={glp_method.__qualname__}", flush=True)
|
||||
except Exception as e:
|
||||
import traceback
|
||||
print(f"[PICK_DBG] UsdGeom error: {e}", flush=True)
|
||||
traceback.print_exc()
|
||||
print(f"[PICK_DBG] root_prim_path={self.task.root_prim_path}", flush=True)
|
||||
print(f"[PICK_DBG] reference_prim_path={self.controller.reference_prim_path}", flush=True)
|
||||
T_world_obj = tf_matrix_from_pose(*local_pose)
|
||||
print(f"[PICK_DBG] T_world_obj translation={T_world_obj[:3,3]}", flush=True)
|
||||
T_world_ee = T_world_obj[None] @ self.T_obj_ee
|
||||
print(f"[PICK_DBG] T_world_ee[0] translation={T_world_ee[0,:3,3]}", flush=True)
|
||||
|
||||
if frame == "world":
|
||||
return T_world_ee
|
||||
@@ -294,8 +335,11 @@ class Pick(BaseSkill):
|
||||
T_world_base = get_relative_transform(
|
||||
get_prim_at_path(self.controller.reference_prim_path), get_prim_at_path(self.task.root_prim_path)
|
||||
)
|
||||
print(f"[PICK_DBG] T_world_base translation={T_world_base[:3,3]}", flush=True)
|
||||
T_base_world = np.linalg.inv(T_world_base)
|
||||
print(f"[PICK_DBG] T_base_world translation={T_base_world[:3,3]}", flush=True)
|
||||
T_base_ee = T_base_world[None] @ T_world_ee
|
||||
print(f"[PICK_DBG] T_base_ee[0] translation={T_base_ee[0,:3,3]}", flush=True)
|
||||
return T_base_ee
|
||||
|
||||
def get_contact(self, contact_threshold=0.0):
|
||||
|
||||
@@ -143,6 +143,8 @@ class Track(BaseSkill):
|
||||
def cal_table_2_base(self):
|
||||
tgt = self.task.fixtures["table"]
|
||||
bbox_tgt = compute_bbox(tgt.prim)
|
||||
print(f"[BBOX_DBG] table bbox min={list(bbox_tgt.min)}, max={list(bbox_tgt.max)}", flush=True)
|
||||
print(f"[BBOX_DBG] T_base_2_world translation={self.T_base_2_world[:3, 3]}", flush=True)
|
||||
table_center = (np.asarray(bbox_tgt.min) + np.asarray(bbox_tgt.max)) / 2
|
||||
tgt_z_max = bbox_tgt.max[2]
|
||||
table_center[2] = tgt_z_max
|
||||
|
||||
@@ -335,6 +335,14 @@ class BananaBaseTask(BaseTask):
|
||||
orientation = get_orientation(cfg.get("euler"), cfg.get("quaternion"))
|
||||
obj.set_local_pose(translation=cfg.get("translation"), orientation=orientation)
|
||||
obj.set_local_scale(cfg.get("scale", [1.0, 1.0, 1.0]))
|
||||
# [LOAD_DBG] Print after set_local_pose
|
||||
try:
|
||||
from pxr import UsdGeom
|
||||
_xf = UsdGeom.Xformable(get_prim_at_path(obj.prim_path))
|
||||
for _op in _xf.GetOrderedXformOps():
|
||||
print(f"[LOAD_DBG] {cfg['name']} after_set_local_pose: {_op.GetName()} = {_op.Get()}", flush=True)
|
||||
except Exception as _e:
|
||||
print(f"[LOAD_DBG] {cfg['name']} after_set_local_pose error: {_e}", flush=True)
|
||||
obj.set_visibility(cfg.get("visible", True))
|
||||
|
||||
# Extra behavior per type
|
||||
@@ -490,7 +498,7 @@ class BananaBaseTask(BaseTask):
|
||||
if cfg.get("apply_randomization", False):
|
||||
envmap_id = random.randint(0, len(envmap_hdr_path_list) - 1)
|
||||
intensity = random.uniform(cfg["intensity_range"][0], cfg["intensity_range"][1])
|
||||
rotation = [random.uniform(cfg["rotation_range"][0], cfg["rotation_range"][1]) for _ in range(3)]
|
||||
rotation = [0.0, 0.0, random.uniform(cfg["rotation_range"][0], cfg["rotation_range"][1])]
|
||||
else:
|
||||
envmap_id = 0
|
||||
intensity = 1000.0
|
||||
|
||||
Binary file not shown.
BIN
workflows/simbox/example_assets/table0/instance.usd.bak
Normal file
BIN
workflows/simbox/example_assets/table0/instance.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/close_h/9748/usd/9748.usd.bak
Normal file
BIN
workflows/simbox/tools/art/close_h/9748/usd/9748.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/close_h/9748/usd/instance.usd.bak
Normal file
BIN
workflows/simbox/tools/art/close_h/9748/usd/instance.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/close_h_down/7130/usd/7130.usd.bak
Normal file
BIN
workflows/simbox/tools/art/close_h_down/7130/usd/7130.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/close_v/7265/usd/instance.usd.bak
Normal file
BIN
workflows/simbox/tools/art/close_v/7265/usd/instance.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/close_v/7265/usd/microwave_0.usd.bak
Normal file
BIN
workflows/simbox/tools/art/close_v/7265/usd/microwave_0.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/open_h/9912/usd/9912.usd.bak
Normal file
BIN
workflows/simbox/tools/art/open_h/9912/usd/9912.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/open_h/9912/usd/instance.usd.bak
Normal file
BIN
workflows/simbox/tools/art/open_h/9912/usd/instance.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/open_v/7265/usd/instance.usd.bak
Normal file
BIN
workflows/simbox/tools/art/open_v/7265/usd/instance.usd.bak
Normal file
Binary file not shown.
Binary file not shown.
BIN
workflows/simbox/tools/art/open_v/7265/usd/microwave_0.usd.bak
Normal file
BIN
workflows/simbox/tools/art/open_v/7265/usd/microwave_0.usd.bak
Normal file
Binary file not shown.
@@ -159,13 +159,44 @@ class SimBoxDualWorkFlow(NimbusWorkFlow):
|
||||
global_collision_paths,
|
||||
)
|
||||
self.world.reset()
|
||||
# [RESET_DBG] Check object xformOps right after world.reset()
|
||||
try:
|
||||
from pxr import UsdGeom
|
||||
for obj_name, obj in self.task.objects.items():
|
||||
_xf = UsdGeom.Xformable(get_prim_at_path(obj.prim_path))
|
||||
for _op in _xf.GetOrderedXformOps():
|
||||
if 'translate' in _op.GetName().lower():
|
||||
print(f"[RESET_DBG] {obj_name} after_world_reset: {_op.GetName()} = {_op.Get()}", flush=True)
|
||||
except Exception as _e:
|
||||
print(f"[RESET_DBG] after_world_reset error: {_e}", flush=True)
|
||||
|
||||
self.world.step(render=True)
|
||||
# [RESET_DBG] Check after first world.step(render=True)
|
||||
try:
|
||||
for obj_name, obj in self.task.objects.items():
|
||||
_xf = UsdGeom.Xformable(get_prim_at_path(obj.prim_path))
|
||||
for _op in _xf.GetOrderedXformOps():
|
||||
if 'translate' in _op.GetName().lower():
|
||||
print(f"[RESET_DBG] {obj_name} after_first_step: {_op.GetName()} = {_op.Get()}", flush=True)
|
||||
except Exception as _e:
|
||||
print(f"[RESET_DBG] after_first_step error: {_e}", flush=True)
|
||||
|
||||
self.controllers = self._initialize_controllers(self.task, self.task_cfg, self.world)
|
||||
self.skills = self._initialize_skills(self.task, self.task_cfg, self.controllers, self.world)
|
||||
|
||||
for _ in range(50):
|
||||
for _i in range(50):
|
||||
self._init_static_objects(self.task)
|
||||
self.world.step(render=False)
|
||||
# [RESET_DBG] Check at step 0, 1, 49
|
||||
if _i in (0, 1, 49):
|
||||
try:
|
||||
for obj_name, obj in self.task.objects.items():
|
||||
_xf = UsdGeom.Xformable(get_prim_at_path(obj.prim_path))
|
||||
for _op in _xf.GetOrderedXformOps():
|
||||
if 'translate' in _op.GetName().lower():
|
||||
print(f"[RESET_DBG] {obj_name} after_warmup_step_{_i}: {_op.GetName()} = {_op.Get()}", flush=True)
|
||||
except Exception as _e:
|
||||
print(f"[RESET_DBG] after_warmup_step_{_i} error: {_e}", flush=True)
|
||||
|
||||
self.logger = LmdbLogger(
|
||||
task_dir=self.task_cfg["data"]["task_dir"],
|
||||
|
||||
Reference in New Issue
Block a user