Description
Take frozen_lake_aico as an example.
env_config.yaml is set as follows:
```yaml
env1:
  env_name: frozenlake
  env_config:
    render_mode: vision
  train_size: 1
  test_size: 1
```
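For reference, a small sketch (not part of the original report) that loads this config with PyYAML and prints the requested split sizes; the placement of `train_size`/`test_size` directly under `env1` follows the YAML above:

```python
import yaml

# Hypothetical path; point this at the env_config.yaml shown above.
with open("env_config.yaml") as f:
    cfg = yaml.safe_load(f)

for name, env in cfg.items():
    # Assumes train_size/test_size sit next to env_config, as in the YAML above.
    # With that config this prints: env1 frozenlake train=1 test=1
    print(name, env["env_name"], f"train={env['train_size']}", f"test={env['test_size']}")
```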
run.sh is set as follows:
```bash
set -x

export VLLM_ATTENTION_BACKEND=XFORMERS
export PYTHONHASHSEED=0

SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

python -m vagen.env.create_dataset \
    --yaml_path "$SCRIPT_DIR/env_config.yaml" \
    --train_path "data/frozenlake-vision-debug/train.parquet" \
    --test_path "data/frozenlake-vision-debug/test.parquet"

python3 -m vagen.trainer.main_ppo \
    algorithm.adv_estimator=masked_gae \
    algorithm.high_level_gamma=0.95 \
    data.train_files=data/frozenlake-vision-debug/train.parquet \
    data.val_files=data/frozenlake-vision-debug/test.parquet \
    data.train_batch_size=32 \
    data.max_prompt_length=1024 \
    data.max_response_length=128 \
    data.max_trajectory_length=2048 \
    data.image_key=images \
    data.truncation=left \
    actor_rollout_ref.model.path=/opt/nas/p/shared/checkpoints/Qwen2-VL-2B-Instruct \
    actor_rollout_ref.actor.optim.lr=1e-6 \
    actor_rollout_ref.model.use_remove_padding=True \
    actor_rollout_ref.actor.ppo_mini_batch_size=16 \
    actor_rollout_ref.actor.ppo_micro_batch_size_per_gpu=1 \
    actor_rollout_ref.actor.use_kl_loss=False \
    actor_rollout_ref.actor.kl_loss_coef=0.001 \
    actor_rollout_ref.actor.kl_loss_type=mse \
    actor_rollout_ref.model.enable_gradient_checkpointing=True \
    actor_rollout_ref.actor.fsdp_config.param_offload=False \
    actor_rollout_ref.actor.fsdp_config.optimizer_offload=False \
    actor_rollout_ref.rollout.log_prob_micro_batch_size_per_gpu=1 \
    actor_rollout_ref.rollout.tensor_model_parallel_size=2 \
    actor_rollout_ref.rollout.name=vllm \
    actor_rollout_ref.rollout.gpu_memory_utilization=0.2 \
    actor_rollout_ref.rollout.enable_chunked_prefill=False \
    actor_rollout_ref.rollout.enforce_eager=False \
    actor_rollout_ref.rollout.free_cache_engine=False \
    actor_rollout_ref.rollout.n=1 \
    actor_rollout_ref.ref.log_prob_micro_batch_size_per_gpu=1 \
    actor_rollout_ref.ref.fsdp_config.param_offload=True \
    actor_rollout_ref.rollout.top_p=0.95 \
    actor_rollout_ref.rollout.temperature=0.7 \
    critic.optim.lr=1e-5 \
    critic.model.use_remove_padding=True \
    critic.model.path=/opt/nas/p/shared/checkpoints/Qwen2-VL-2B-Instruct \
    critic.model.enable_gradient_checkpointing=True \
    critic.ppo_micro_batch_size_per_gpu=1 \
    critic.model.fsdp_config.param_offload=False \
    critic.model.fsdp_config.optimizer_offload=False \
    algorithm.kl_ctrl.kl_coef=0.001 \
    trainer.critic_warmup=0 \
    trainer.logger=['console','wandb'] \
    trainer.project_name='vagen_new' \
    trainer.experiment_name='aico_frozenlake_vision' \
    trainer.n_gpus_per_node=4 \
    trainer.nnodes=1 \
    trainer.save_freq=70 \
    trainer.test_freq=20 \
    trainer.total_training_steps=300 \
    rollout_manager.max_turns=3 \
    rollout_manager.window_size=5 \
    rollout_manager.use_multi_turn_reward=False \
    rollout_manager.use_loss_mask=True \
    rollout_manager.use_gae_mask=True \
    trainer.val_before_train=True \
    trainer.val_generations_to_log_to_wandb=8 \
    rollout_manager.n_trajectory=2 \
    2>&1 | tee aico_frozenlake_vision.log
```
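A quick sanity check (not part of the original run.sh) is to count the rows that `vagen.env.create_dataset` actually wrote before launching training; with `train_size: 1` and `test_size: 1` each parquet file should hold a single row, far below `data.train_batch_size=32`:

```python
import pandas as pd

# Paths taken from run.sh above.
train = pd.read_parquet("data/frozenlake-vision-debug/train.parquet")
test = pd.read_parquet("data/frozenlake-vision-debug/test.parquet")

# Expected to print "1 1" with the env_config.yaml above,
# while data.train_batch_size in run.sh is 32.
print(len(train), len(test))
```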
Running the script produces the following error:
```
Creating parquet from Arrow format: 0%| | 0/1 [00:00<?, ?ba/s]
Creating parquet from Arrow format: 100%|██████████| 1/1 [00:00<00:00, 130.49ba/s]
Map: 0%| | 0/1 [00:00<?, ? examples/s]
Map: 100%|██████████| 1/1 [00:00<00:00, 277.66 examples/s]
Creating parquet from Arrow format: 0%| | 0/1 [00:00<?, ?ba/s]
Creating parquet from Arrow format: 100%|██████████| 1/1 [00:00<00:00, 2576.35ba/s]
Generating train split: 0 examples [00:00, ? examples/s]
Generating train split: 1 examples [00:00, 11.06 examples/s]
Generating test split: 0 examples [00:00, ? examples/s]
Generating test split: 1 examples [00:00, 40.51 examples/s]
Traceback (most recent call last):
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/opt/nas/p/zhAgent/VAGEN/vagen/env/create_dataset.py", line 144, in <module>
    print(train_dataset[i])
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/datasets/arrow_dataset.py", line 2777, in __getitem__
    return self._getitem(key)
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/datasets/arrow_dataset.py", line 2761, in _getitem
    pa_subtable = query_table(self._data, key, indices=self._indices)
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/datasets/formatting/formatting.py", line 607, in query_table
    _check_valid_index_key(key, size)
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/datasets/formatting/formatting.py", line 547, in _check_valid_index_key
    raise IndexError(f"Invalid key: {key} is out of bounds for size {size}")
IndexError: Invalid key: 1 is out of bounds for size 1
```
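The `IndexError` matches the normal out-of-range behaviour of `datasets.Dataset` indexing; a minimal sketch, independent of VAGEN, reproduces the same message with a one-row dataset (mirroring `print(train_dataset[i])` reaching `i = 1` on a split of size 1):

```python
from datasets import Dataset

# A dataset with a single row, like the train split generated with train_size: 1.
ds = Dataset.from_dict({"value": [0]})

print(ds[0])  # fine: the only row
print(ds[1])  # IndexError: Invalid key: 1 is out of bounds for size 1
```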
main_ppo then fails as well, with the following assertion error:

```
Traceback (most recent call last):
  File "/opt/nas/p/zhAgent/VAGEN/vagen/trainer/main_ppo.py", line 29, in main
    run_ppo(config, compute_score=compute_score)
  File "/opt/nas/p/zhAgent/VAGEN/vagen/trainer/main_ppo.py", line 37, in run_ppo
    ray.get(main_task.remote(config, compute_score))
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/ray/_private/auto_init_hook.py", line 21, in auto_init_wrapper
    return fn(*args, **kwargs)
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/ray/_private/client_mode_hook.py", line 103, in wrapper
    return func(*args, **kwargs)
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/ray/_private/worker.py", line 2755, in get
    values, debugger_breakpoint = worker.get_objects(object_refs, timeout=timeout)
  File "/opt/nas/p/conda/envs/ui-vagen/lib/python3.10/site-packages/ray/_private/worker.py", line 906, in get_objects
    raise value.as_instanceof_cause()
ray.exceptions.RayTaskError(AssertionError): ray::main_task() (pid=12313, ip=10.80.4.106)
  File "/opt/nas/p/zhAgent/VAGEN/vagen/trainer/main_ppo.py", line 133, in main_task
    trainer = RayPPOTrainer(config=config,
  File "/opt/nas/p/zhAgent/VAGEN/vagen/trainer/ppo/ray_trainer.py", line 499, in __init__
    self._create_dataloader()
  File "/opt/nas/p/zhAgent/VAGEN/vagen/trainer/ppo/ray_trainer.py", line 640, in _create_dataloader
    assert len(self.train_dataloader) >= 1
AssertionError
```
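For context on the final `AssertionError`, here is a small sketch, not VAGEN code and assuming the trainer's dataloader drops incomplete batches, showing how a one-example train split combined with `data.train_batch_size=32` yields a zero-length dataloader, which is exactly what `assert len(self.train_dataloader) >= 1` rejects:

```python
import torch
from torch.utils.data import DataLoader, TensorDataset

# One training example, as produced by train_size: 1 above.
dataset = TensorDataset(torch.zeros(1, 4))

# batch_size mirrors data.train_batch_size=32 from run.sh; drop_last=True is
# an assumption about the trainer's dataloader, not taken from the source.
loader = DataLoader(dataset, batch_size=32, drop_last=True)

print(len(loader))  # 0 -> "assert len(self.train_dataloader) >= 1" fails
```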