diff --git a/docs/source/getting_started/fetch_random_action_replica_cad_rt.mp4 b/docs/source/getting_started/fetch_random_action_replica_cad_rt.mp4 new file mode 100644 index 000000000..61c81f63f Binary files /dev/null and b/docs/source/getting_started/fetch_random_action_replica_cad_rt.mp4 differ diff --git a/docs/source/getting_started/images/replica_cad_interactive_rt.png b/docs/source/getting_started/images/replica_cad_interactive_rt.png new file mode 100644 index 000000000..93109d682 Binary files /dev/null and b/docs/source/getting_started/images/replica_cad_interactive_rt.png differ diff --git a/docs/source/getting_started/quickstart.md b/docs/source/getting_started/quickstart.md index 0a56769c0..58f0bc498 100644 --- a/docs/source/getting_started/quickstart.md +++ b/docs/source/getting_started/quickstart.md @@ -40,6 +40,65 @@ We support a wide range of controllers. Different controllers can have different Some environments require **downloading assets**. You can download all the assets by `python -m mani_skill2.utils.download_asset all` or download task-specific assets by `python -m mani_skill2.utils.download_asset ${ENV_ID}`. The assets will be downloaded to `./data/` by default, and you can also use the environment variable `MS2_ASSET_DIR` to specify this destination. Please refer to [Environments](../concepts/environments.md) for all supported environments, and which environments require downloading assets. 
+ + +```bash +python -m mani_skill2.examples.demo_random_action -e PickCube-v0 # run headless +python -m mani_skill2.examples.demo_random_action -e PickCube-v0 --render-mode="human" # run with a GUI +``` + +We also have demos for simulations of scenes like ReplicaCAD, which can be run by doing + +```bash +python -m mani_skill2.utils.download_asset "ReplicaCAD" +python -m mani_skill2.examples.demo_random_action -e "ReplicaCAD_SceneManipulation-v1" --render-mode="rgb_array" --record-dir="videos" # run headless and save video +python -m mani_skill2.examples.demo_random_action -e "ReplicaCAD_SceneManipulation-v1" --render-mode="human" # run with GUI (recommended!) +``` + +To turn ray-tracing on for more photo-realistic rendering, you can do `gym.make(env_id, shader_dir="rt")`, or run + +```bash +python -m mani_skill2.examples.demo_random_action -e "ReplicaCAD_SceneManipulation-v1" --render-mode="rgb_array" --record-dir="videos" --shader="rt" # will be slow due to ray tracing +python -m mani_skill2.examples.demo_random_action -e "ReplicaCAD_SceneManipulation-v1" --render-mode="human" --shader="rt-fast" # faster ray-tracing option but lower quality +``` + +For more details on rendering see TODO (stao) + +## Interactive Play + +TODO (stao): Add demo of teleoperation from camera + +We provide an example script to interactively play with our environments. A display is required to show the GUI. + +```bash +# PickCube-v0 can be replaced with another environment id. +python -m mani_skill2.examples.demo_manual_control -e PickCube-v0 # opens an interactive GUI window +``` + +Keyboard controls: + +- Press `i` (or `j`, `k`, `l`, `u`, `o`) to move the end-effector. +- Press any key between `1` and `6` to rotate the end-effector. +- Press `f` or `g` to open or close the gripper. +- Press `w` (or `a`, `s`, `d`) to translate the base if the robot is mobile. Press `q` or `e` to rotate the base. Press `z` or `x` to lift the torso. +- Press `esc` to close the viewer and exit the program. 
+ +To enable an interactive viewer supported by SAPIEN, you can add `--enable-sapien-viewer`. The interactive SAPIEN viewer is more powerful for debugging (e.g., checking collision shapes, getting current poses). There will be two windows: an OpenCV window and a SAPIEN (GL) window. Pressing `0` on the focused window can switch the control to the other one. + +```{image} images/OpenCV-viewer.png +--- +height: 256px +alt: OpenCV viewer +--- +``` + +```{image} images/SAPIEN-viewer.png +--- +height: 256px +alt: SAPIEN viewer +--- +``` + ## GPU Parallelized/Vectorized Environments ManiSkill is powered by SAPIEN which supports GPU parallelized physics simulation and GPU parallelized rendering. This enables achieving 200,000+ state-based simulation FPS and 10,000+ FPS with rendering on a single 4090 GPU. For full benchmarking results see [this page](../additional_resources/performance_benchmarking) diff --git a/mani_skill2/envs/scenes/__init__.py b/mani_skill2/envs/scenes/__init__.py index 1c69ac41f..db9f5f9cf 100644 --- a/mani_skill2/envs/scenes/__init__.py +++ b/mani_skill2/envs/scenes/__init__.py @@ -23,6 +23,6 @@ for k, scene_builder in scene_builders.items(): register_env( f"{k}_SceneManipulation-v1", - max_episode_steps=None, - scene_builder_cls=scene_builder, + max_episode_steps=200, + scene_builder_cls=scene_builder, )(SceneManipulationEnv) diff --git a/mani_skill2/examples/demo_random_action.py b/mani_skill2/examples/demo_random_action.py index 177f2d4e2..f5fd2d2f3 100644 --- a/mani_skill2/examples/demo_random_action.py +++ b/mani_skill2/examples/demo_random_action.py @@ -14,6 +14,7 @@ def parse_args(args=None): parser.add_argument("--reward-mode", type=str) parser.add_argument("-c", "--control-mode", type=str) parser.add_argument("--render-mode", type=str) + parser.add_argument("--shader", default="default", type=str, help="Change shader used for rendering. Default is 'default' which is very fast. Can also be 'rt' for ray tracing and generating photo-realistic renders. 
Can also be 'rt-fast' for a faster but lower quality ray-traced renderer") parser.add_argument("--record-dir", type=str) parser.add_argument("--quiet", action="store_true", help="Disable verbose output.") parser.add_argument( @@ -47,6 +48,7 @@ def main(args): reward_mode=args.reward_mode, control_mode=args.control_mode, render_mode=args.render_mode, + shader_dir=args.shader, **args.env_kwargs )