[ { "idx": 1, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1/loser.jpg", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 2, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/2/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/2/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/2/loser.jpg", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 4, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/4/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/4/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/4/loser.jpg", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 5, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/5/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/5/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/5/loser.jpg", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 6, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/6/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/6/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/6/loser.jpg", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 7, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/7/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/7/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/7/loser.jpg", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", 
"label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 9, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/9/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/9/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/9/loser.jpg", "save_id": 775, "prompt_en": "Turn the dining chair into a wooden one.", "prompt_cn": "将这把餐椅变成一把木制餐椅。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 10, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/10/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/10/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/10/loser.jpg", "save_id": 775, "prompt_en": "Turn the dining chair into a wooden one.", "prompt_cn": "将这把餐椅变成一把木制餐椅。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 12, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/12/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/12/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/12/loser.jpg", "save_id": 776, "prompt_en": "Change the pillows to linen material.", 
"prompt_cn": "将枕头更改为亚麻材质。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 13, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/13/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/13/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/13/loser.jpg", "save_id": 776, "prompt_en": "Change the pillows to linen material.", "prompt_cn": "将枕头更改为亚麻材质。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 15, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/15/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/15/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/15/loser.jpg", "save_id": 10, "prompt_en": "Add a pink balloon to the panda’s hand on the far right.", "prompt_cn": "在最右边的熊猫手上加入一个粉色的气球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 16, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/16/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/16/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/16/loser.jpg", "save_id": 10, "prompt_en": "Add a pink balloon to 
the panda’s hand on the far right.", "prompt_cn": "在最右边的熊猫手上加入一个粉色的气球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 20, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/12.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/20/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/20/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/20/loser.jpg", "save_id": 783, "prompt_en": "Change the table material to marble.", "prompt_cn": "将桌子的材质更改为大理石。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 22, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/22/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/22/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/22/loser.jpg", "save_id": 786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 23, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/23/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/23/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/23/loser.jpg", "save_id": 
786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 26, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/26/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/26/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/26/loser.jpg", "save_id": 788, "prompt_en": "Make the side table ceramic.", "prompt_cn": "将茶几的材质更改为陶瓷。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 27, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/27/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/27/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/27/loser.jpg", "save_id": 788, "prompt_en": "Make the side table ceramic.", "prompt_cn": "将茶几的材质更改为陶瓷。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 28, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/28/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/28/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/28/loser.jpg", "save_id": 788, 
"prompt_en": "Make the side table ceramic.", "prompt_cn": "将茶几的材质更改为陶瓷。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 31, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/31/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/31/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/31/loser.jpg", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 32, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/32/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/32/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/32/loser.jpg", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 33, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/33/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/33/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/33/loser.jpg", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 35, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/35/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/35/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/35/loser.jpg", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 36, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/36/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/36/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/36/loser.jpg", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 37, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/37/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/37/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/37/loser.jpg", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 38, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/24.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/38/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/38/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/38/loser.jpg", "save_id": 25, "prompt_en": "Place a cutting board on the countertop next to the sink on the right", "prompt_cn": "在右侧水槽旁的操作台上放一个砧板", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 39, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/24.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/39/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/39/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/39/loser.jpg", "save_id": 25, "prompt_en": "Place a cutting board on the countertop next to the sink on the right", "prompt_cn": "在右侧水槽旁的操作台上放一个砧板", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 40, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/40/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/40/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/40/loser.jpg", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 41, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/41/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/41/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/41/loser.jpg", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 45, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/45/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/45/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/45/loser.jpg", "save_id": 34, "prompt_en": "Replace the bedside lamp on the right nightstand with an alarm clock.", "prompt_cn": "将右侧床头柜的台灯换为一个闹钟。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 46, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/46/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/46/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/46/loser.jpg", "save_id": 34, "prompt_en": "Replace the bedside lamp on the right nightstand with an alarm clock.", "prompt_cn": "将右侧床头柜的台灯换为一个闹钟。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 48, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/34.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/48/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/48/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/48/loser.jpg", "save_id": 35, "prompt_en": "Add a kitten on the stool next to the dressing table chair.", "prompt_cn": "在梳妆台椅子旁的凳子上加入一只小猫。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 49, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/34.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/49/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/49/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/49/loser.jpg", "save_id": 35, "prompt_en": "Add a kitten on the stool next to the dressing table chair.", "prompt_cn": "在梳妆台椅子旁的凳子上加入一只小猫。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 51, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/51/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/51/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/51/loser.jpg", "save_id": 37, "prompt_en": "Add a Starbucks iced latte with the logo visible on the table.", "prompt_cn": "在桌子上加入一杯带标志的星巴克冰拿铁 。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 52, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/52/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/52/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/52/loser.jpg", "save_id": 37, "prompt_en": "Add a Starbucks iced latte with the logo visible on the table.", "prompt_cn": "在桌子上加入一杯带标志的星巴克冰拿铁 。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 53, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/53/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/53/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/53/loser.jpg", "save_id": 112, "prompt_en": "Add another identical coffee mug next to it.", "prompt_cn": "在现有咖啡杯旁边再添加一个相同的咖啡杯。", 
"label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 54, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/54/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/54/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/54/loser.jpg", "save_id": 112, "prompt_en": "Add another identical coffee mug next to it.", "prompt_cn": "在现有咖啡杯旁边再添加一个相同的咖啡杯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 55, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/55/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/55/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/55/loser.jpg", "save_id": 113, "prompt_en": "Place an identical armchair opposite the existing one.", "prompt_cn": "在现有的扶手椅对面放置一把一模一样的扶手椅。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 57, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/57/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/57/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/57/loser.jpg", "save_id": 113, "prompt_en": "Place an identical armchair 
opposite the existing one.", "prompt_cn": "在现有的扶手椅对面放置一把一模一样的扶手椅。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 59, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/59/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/59/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/59/loser.jpg", "save_id": 113, "prompt_en": "Place an identical armchair opposite the existing one.", "prompt_cn": "在现有的扶手椅对面放置一把一模一样的扶手椅。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 60, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/60/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/60/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/60/loser.jpg", "save_id": 115, "prompt_en": "Add an identical streetlight on the other side of the road.", "prompt_cn": "在道路的对面添加一盏相同的路灯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 61, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/61/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/61/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/61/loser.jpg", "save_id": 117, "prompt_en": "Add another identical picnic basket on the opposite corner.", "prompt_cn": "在毯子对角的另一角落添加一个相同的野餐篮。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 62, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/62/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/62/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/62/loser.jpg", "save_id": 117, "prompt_en": "Add another identical picnic basket on the opposite corner.", "prompt_cn": "在毯子对角的另一角落添加一个相同的野餐篮。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 65, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/65/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/65/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/65/loser.jpg", "save_id": 117, "prompt_en": "Add another identical picnic basket on the opposite corner.", "prompt_cn": "在毯子对角的另一角落添加一个相同的野餐篮。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 66, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/8.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/66/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/66/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/66/loser.jpg", "save_id": 119, "prompt_en": "Add an identical burger next to the existing one.", "prompt_cn": "在现在的汉堡旁边加入一个和现在汉堡一模一样的汉堡。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 67, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/67/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/67/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/67/loser.jpg", "save_id": 119, "prompt_en": "Add an identical burger next to the existing one.", "prompt_cn": "在现在的汉堡旁边加入一个和现在汉堡一模一样的汉堡。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 68, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/68/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/68/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/68/loser.jpg", "save_id": 121, "prompt_en": "Place a pillow identical to the one on the left side of the sofa in the right corner of the sofa.", "prompt_cn": "在沙发右侧角落放置一个和沙发左侧相同的抱枕。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, 
{ "idx": 69, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/69/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/69/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/69/loser.jpg", "save_id": 121, "prompt_en": "Place a pillow identical to the one on the left side of the sofa in the right corner of the sofa.", "prompt_cn": "在沙发右侧角落放置一个和沙发左侧相同的抱枕。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 70, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/70/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/70/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/70/loser.jpg", "save_id": 121, "prompt_en": "Place a pillow identical to the one on the left side of the sofa in the right corner of the sofa.", "prompt_cn": "在沙发右侧角落放置一个和沙发左侧相同的抱枕。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 72, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/72/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/72/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/72/loser.jpg", "save_id": 123, "prompt_en": "Add the matching boot next to it to complete the pair.", "prompt_cn": "在现有靴子旁边添加一只与之匹配的靴子,以组成一双。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 73, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/73/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/73/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/73/loser.jpg", "save_id": 123, "prompt_en": "Add the matching boot next to it to complete the pair.", "prompt_cn": "在现有靴子旁边添加一只与之匹配的靴子,以组成一双。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 76, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/76/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/76/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/76/loser.jpg", "save_id": 125, "prompt_en": "Place another identical purple yoga mat next to it.", "prompt_cn": "在现有的紫色瑜伽垫旁边再放置一块相同的紫色瑜伽垫。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 77, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/77/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/77/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/77/loser.jpg", "save_id": 126, "prompt_en": "Place an identical candle holder on the right end of the shelf.", "prompt_cn": 
"在木板架的右端放置一个相同的烛台。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 79, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/79/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/79/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/79/loser.jpg", "save_id": 126, "prompt_en": "Place an identical candle holder on the right end of the shelf.", "prompt_cn": "在木板架的右端放置一个相同的烛台。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 81, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/81/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/81/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/81/loser.jpg", "save_id": 126, "prompt_en": "Place an identical candle holder on the right end of the shelf.", "prompt_cn": "在木板架的右端放置一个相同的烛台。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 82, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/82/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/82/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/82/loser.jpg", "save_id": 127, 
"prompt_en": "Hang another identical red lantern next to it.", "prompt_cn": "在它旁边再挂一个一模一样的红灯笼。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 84, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/84/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/84/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/84/loser.jpg", "save_id": 127, "prompt_en": "Hang another identical red lantern next to it.", "prompt_cn": "在它旁边再挂一个一模一样的红灯笼。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 86, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/86/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/86/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/86/loser.jpg", "save_id": 127, "prompt_en": "Hang another identical red lantern next to it.", "prompt_cn": "在它旁边再挂一个一模一样的红灯笼。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 87, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/17.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/87/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/87/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/87/loser.jpg", "save_id": 128, "prompt_en": "Build another identical snowman next to the first one.", "prompt_cn": "在第一个雪人旁边再堆一个相同的雪人。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 89, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/20.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/89/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/89/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/89/loser.jpg", "save_id": 131, "prompt_en": "Copy an identical game console next to the current one", "prompt_cn": "在现在游戏机的旁边复制一个相同的游戏机。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 90, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/90/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/90/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/90/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 91, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/91/source.jpg", 
"winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/91/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/91/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 92, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/92/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/92/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/92/loser.jpg", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 93, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/93/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/93/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/93/loser.jpg", "save_id": 903, "prompt_en": "Have the girl who is taking the phone photo make a peace sign with her free hand.", "prompt_cn": "让正在用手机拍照的女生,用另一只空着的手做出一个“耶”的手势。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, 
"hints": "" }, { "idx": 94, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/94/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/94/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/94/loser.jpg", "save_id": 903, "prompt_en": "Have the girl who is taking the phone photo make a peace sign with her free hand.", "prompt_cn": "让正在用手机拍照的女生,用另一只空着的手做出一个“耶”的手势。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 95, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/95/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/95/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/95/loser.jpg", "save_id": 907, "prompt_en": "Have the girl grip the lat pulldown machine with her hands.", "prompt_cn": "让这个女孩用手握住高位下拉的器械。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 96, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Casual_Reason/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/96/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/96/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/96/loser.jpg", "save_id": 1227, "prompt_en": "Remove the first book beneath the telescope.", "prompt_cn": "移除望远镜下方最上面的那本书。", "label": "preference", "dimension": 
"IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 98, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/105.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/98/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/98/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/98/loser.jpg", "save_id": 833, "prompt_en": "Keep the fruits and bowl unchanged, and change the background to an indoor kitchen countertop with cabinets.", "prompt_cn": "保持水果和碗不变,将背景改为室内厨房料理台和橱柜。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 99, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/114.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/99/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/99/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/99/loser.jpg", "save_id": 835, "prompt_en": "Keep the man and buildings unchanged, and change the background sky to a starry night with the Milky Way visible.", "prompt_cn": "保持男子和建筑不变,将背景天空改为可见银河的星空夜景。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 100, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/121.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/100/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/100/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/100/loser.jpg", "save_id": 552, "prompt_en": "Change the jockey’s red helmet on the leading horse to bright neon green.", "prompt_cn": "将前方赛马骑手的红色头盔改为鲜亮的荧光绿色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 101, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/121.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/101/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/101/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/101/loser.jpg", "save_id": 552, "prompt_en": "Change the jockey’s red helmet on the leading horse to bright neon green.", "prompt_cn": "将前方赛马骑手的红色头盔改为鲜亮的荧光绿色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 102, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/102/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/102/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/102/loser.jpg", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": 
"IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 104, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/104/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/104/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/104/loser.jpg", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 105, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/105/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/105/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/105/loser.jpg", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 106, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/106/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/106/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/106/loser.jpg", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 107, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/241.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/107/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/107/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/107/loser.jpg", "save_id": 570, "prompt_en": "Change the black sedan on the right to a bright solid sky blue.", "prompt_cn": "将右侧黑色轿车车身改成亮丽的天蓝色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 108, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/201.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/108/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/108/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/108/loser.jpg", "save_id": 300, "prompt_en": "Remove the 'Stop' sign and the pink helmet, and add a skateboard on the road.", "prompt_cn": "移除‘Stop’的标志和粉色的头盔,在路上加入一个滑板。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 109, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/205.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/109/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/109/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/109/loser.jpg", "save_id": 301, "prompt_en": "Add the black text 'Machine Learning' in the center of the laptop screen, remove the circular ornament on the left, place an open heavy dictionary in front of the laptop, and remove the stone in the lower-left corner of the desk.", "prompt_cn": "在笔记本电脑屏幕中央添加一句黑色文字‘Machine Learning’,将左侧圆环形摆件移除,在笔记本电脑前面加入一本翻开的厚重词典,移除桌面左前方的石头。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 110, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/205.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/110/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/110/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/110/loser.jpg", "save_id": 301, "prompt_en": "Add the black text 'Machine Learning' in the center of the laptop screen, remove the circular ornament on the left, place an open heavy dictionary in front of the laptop, and remove the stone in the lower-left corner of the desk.", "prompt_cn": "在笔记本电脑屏幕中央添加一句黑色文字‘Machine Learning’,将左侧圆环形摆件移除,在笔记本电脑前面加入一本翻开的厚重词典,移除桌面左前方的石头。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 111, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/303.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/111/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/111/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/111/loser.jpg", "save_id": 306, "prompt_en": "Have the girl pick up the water bottle in the center 
and drink from it, change the pants to the same color as the top, and add a white towel around her neck.", "prompt_cn": "让这个女生拿起中间的水瓶喝水,将裤子改为与上衣相同的颜色,并在脖子上挂一条白色毛巾。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 112, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/304.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/112/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/112/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/112/loser.jpg", "save_id": 307, "prompt_en": "Add a red scarf to the boy, place a sticky note on the table with the cola, have him pick up the cola, and change his short-sleeved shirt to white.", "prompt_cn": "给这个男孩戴上一条红领巾,在放可乐的桌子上添加一个便利贴,让他拿起桌子上的可乐,并将短袖改为白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 113, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/304.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/113/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/113/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/113/loser.jpg", "save_id": 307, "prompt_en": "Add a red scarf to the boy, place a sticky note on the table with the cola, have him pick up the cola, and change his short-sleeved shirt to white.", "prompt_cn": "给这个男孩戴上一条红领巾,在放可乐的桌子上添加一个便利贴,让他拿起桌子上的可乐,并将短袖改为白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 115, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/309.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/115/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/115/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/115/loser.jpg", "save_id": 311, "prompt_en": "Add a park sign next to the path with the text 'Sunset Research Park', change the bench to white and add a person reading on it, and place a tent on the lawn.", "prompt_cn": "在小路旁添加一块公园指示牌,牌子上写‘Sunset Research Park’,将长椅改为白色并在上面添加一位读书的人,在草坪上加入一个帐篷。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 116, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/311.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/116/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/116/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/116/loser.jpg", "save_id": 312, "prompt_en": "Remove the small potted plant from the table, change the chandelier color to warm yellow, remove the blue plastic box, and add a wooden sign on the wall with the text '努力学习' in a warm, cute handwritten style.", "prompt_cn": "移除桌上的小盆栽,将吊灯的颜色改为暖黄色,移除蓝色塑料箱,并在墙上添加一块写有‘努力学习’的木质牌子,字体为温暖可爱的手写风格。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 117, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/313.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/117/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/117/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/117/loser.jpg", "save_id": 314, "prompt_en": "Add a small dog walking beside the surfer and change the surfboard color to bright yellow.", "prompt_cn": "在冲浪者旁边添加一只小狗,并将冲浪板颜色改为亮黄色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 118, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/313.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/118/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/118/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/118/loser.jpg", "save_id": 314, "prompt_en": "Add a small dog walking beside the surfer and change the surfboard color to bright yellow.", "prompt_cn": "在冲浪者旁边添加一只小狗,并将冲浪板颜色改为亮黄色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 120, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/320.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/120/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/120/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/120/loser.jpg", "save_id": 318, "prompt_en": "Remove the shoe that can be worn on the left foot in the image, remove the tennis ball farthest from the camera, change the background to light blue, and write 'Practice' in green chalk-style text above the tennis ball.", "prompt_cn": "移除图中鞋子可以穿在左脚上的鞋,移除离镜头最远的网球,将背景改为淡蓝色,并在网球上方用绿色粉笔风格文字写上‘Practice’。", "label": "preference", "dimension": "IF", "system_prompt_name": { 
"IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 121, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Complex_paint/CN/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/121/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/121/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/121/loser.jpg", "save_id": 428, "prompt_en": "", "prompt_cn": "", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 122, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/122/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/122/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/122/loser.jpg", "save_id": 1179, "prompt_en": "Replace the smiling balloon with a frowning one.", "prompt_cn": "将微笑的气球替换为一个皱眉的气球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 123, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/123/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/123/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/123/loser.jpg", "save_id": 1181, "prompt_en": "Make the boy sink into deep thought.", "prompt_cn": "让这个男孩看起来正在深思。", "label": "preference", 
"dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 124, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/124/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/124/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/124/loser.jpg", "save_id": 1183, "prompt_en": "Make the baby laughing happily.", "prompt_cn": "让婴儿开心地笑起来。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 125, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/125/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/125/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/125/loser.jpg", "save_id": 1187, "prompt_en": "Make him look relieved and happy.", "prompt_cn": "让他看起来放松、如释重负并且开心。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 126, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/24.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/126/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/126/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/126/loser.jpg", "save_id": 1193, "prompt_en": "Change the expression to a sad face.", "prompt_cn": "将表情更改为悲伤的脸。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 128, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/32.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/128/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/128/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/128/loser.jpg", "save_id": 1199, "prompt_en": "Make him look shocked with his jaw dropped.", "prompt_cn": "让他看起来很震惊,张大嘴巴下巴下垂。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 129, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/129/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/129/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/129/loser.jpg", "save_id": 1200, "prompt_en": "Make her look surprised by what she is reading.", "prompt_cn": "让她看起来对自己正在阅读的内容感到惊讶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 130, "sampling_model": "Qwen-Image-Edit-2509", 
"source_image_ori": "Part2/emotion_change/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/130/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/130/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/130/loser.jpg", "save_id": 1200, "prompt_en": "Make her look surprised by what she is reading.", "prompt_cn": "让她看起来对自己正在阅读的内容感到惊讶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 131, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/131/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/131/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/131/loser.jpg", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 132, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/132/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/132/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/132/loser.jpg", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": 
"提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 133, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/133/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/133/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/133/loser.jpg", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 135, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/135/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/135/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/135/loser.jpg", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 136, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/136/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/136/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/136/loser.jpg", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 137, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/137/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/137/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/137/loser.jpg", "save_id": 694, "prompt_en": "Extract the bench from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的长椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 138, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/138/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/138/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/138/loser.jpg", "save_id": 694, "prompt_en": "Extract the bench from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": 
"提取出图像中的长椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 139, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/39.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/139/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/139/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/139/loser.jpg", "save_id": 695, "prompt_en": "Extract the boy riding the bicycle and his bicycle, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中骑自行车的男孩及其自行车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 140, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/47.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/140/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/140/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/140/loser.jpg", "save_id": 696, "prompt_en": "Extract the hanging clock on the side of the building, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取建筑物侧面的挂钟,保持其位置、朝向和姿态不变,并将背景替换为纯白色。 ", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 141, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/47.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/141/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/141/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/141/loser.jpg", "save_id": 696, "prompt_en": "Extract the hanging clock on the side of the building, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取建筑物侧面的挂钟,保持其位置、朝向和姿态不变,并将背景替换为纯白色。 ", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 142, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/53.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/142/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/142/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/142/loser.jpg", "save_id": 697, "prompt_en": "Extract the hat from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图片中的帽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 143, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/53.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/143/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/143/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/143/loser.jpg", "save_id": 697, "prompt_en": "Extract the hat from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": 
"提取图片中的帽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 144, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/144/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/144/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/144/loser.jpg", "save_id": 698, "prompt_en": "Extract the yellow lounge chair, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出这个黄色的躺椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 145, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/145/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/145/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/145/loser.jpg", "save_id": 698, "prompt_en": "Extract the yellow lounge chair, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出这个黄色的躺椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 146, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/59.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/146/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/146/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/146/loser.jpg", "save_id": 698, "prompt_en": "Extract the yellow lounge chair, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出这个黄色的躺椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 147, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/80.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/147/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/147/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/147/loser.jpg", "save_id": 700, "prompt_en": "Extract the metal lantern from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的金属灯笼,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 148, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/80.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/148/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/148/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/148/loser.jpg", "save_id": 700, "prompt_en": "Extract the metal lantern from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": 
"提取出图像中的金属灯笼,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 149, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/84.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/149/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/149/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/149/loser.jpg", "save_id": 701, "prompt_en": "Extract the yellow motorcycle from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出黄色的摩托车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 150, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/150/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/150/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/150/loser.jpg", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 151, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/85.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/151/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/151/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/151/loser.jpg", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 152, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/152/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/152/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/152/loser.jpg", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 153, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/153/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/153/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/153/loser.jpg", "save_id": 705, "prompt_en": "Extract the soccer ball from the image, keeping its position, orientation, and pose unchanged, and replace the 
background with pure white.", "prompt_cn": "提取出图像中的足球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 154, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/96.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/154/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/154/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/154/loser.jpg", "save_id": 706, "prompt_en": "Extract the TV from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的电视,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 155, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/100.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/155/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/155/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/155/loser.jpg", "save_id": 707, "prompt_en": "Extract the larger pigeon, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出较大的鸽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 156, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/100.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/156/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/156/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/156/loser.jpg", "save_id": 707, "prompt_en": "Extract the larger pigeon, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出较大的鸽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 157, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/103.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/157/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/157/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/157/loser.jpg", "save_id": 709, "prompt_en": "Extract the anime girl from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的动漫女孩,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 158, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/103.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/158/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/158/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/158/loser.jpg", "save_id": 709, "prompt_en": "Extract the anime girl from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的动漫女孩,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", 
"label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 160, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/110.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/160/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/160/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/160/loser.jpg", "save_id": 711, "prompt_en": "Extract the mask from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的口罩,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 161, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/120.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/161/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/161/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/161/loser.jpg", "save_id": 713, "prompt_en": "Extract the sofa from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的沙发,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 162, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/123.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/162/source.jpg", 
"winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/162/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/162/loser.jpg", "save_id": 714, "prompt_en": "Extract only the plate with the three lollipops, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的餐盘和三根棒棒糖,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 163, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/163/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/163/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/163/loser.jpg", "save_id": 968, "prompt_en": "Move the single cherry tomato onto the spoon.”", "prompt_cn": "将那颗单独的樱桃番茄移到勺子上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 164, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/164/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/164/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/164/loser.jpg", "save_id": 970, "prompt_en": "Move the chair to the right side of the sofa.", "prompt_cn": "将椅子移动到沙发的右侧。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", 
"VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 165, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/165/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/165/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/165/loser.jpg", "save_id": 971, "prompt_en": "Move the robot to the left.", "prompt_cn": "将机器人移动到左侧。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 166, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/166/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/166/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/166/loser.jpg", "save_id": 981, "prompt_en": "Move the table lamp to the right bedside table.", "prompt_cn": "将台灯移到右侧的床头柜上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 167, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/167/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/167/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/167/loser.jpg", "save_id": 981, "prompt_en": "Move the table lamp to the right bedside table.", "prompt_cn": "将台灯移到右侧的床头柜上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 168, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/168/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/168/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/168/loser.jpg", "save_id": 985, "prompt_en": "Move the cat onto the sofa.", "prompt_cn": "将猫移动到沙发上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 170, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Multi_Image_Aware/6/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/170/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/170/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/170/loser.jpg", "save_id": 2889, "prompt_en": "Align the balloons that share the same color in image 1 with the state of the balloons in image 2.", "prompt_cn": "将第一张图像和第二张图像中颜色一样的气球变为相同的状态。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/170/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 172, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Multi_Image_Aware/15/source_1.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/172/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/172/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/172/loser.jpg", "save_id": 2898, "prompt_en": "Adjust the state of the vase in image 1 to match the state of the cup in image 2.", "prompt_cn": "将第一张图像中花瓶的状态调整为第二张图像中杯子的状态。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/172/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 173, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/9.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/173/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/173/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/173/loser.jpg", "save_id": 1113, "prompt_en": "Have the boy hold the nearby bottle of Coke in his right hand and make him appear to be drinking it.", "prompt_cn": "让这个男孩右手拿着旁边的可乐,并做出正在喝的动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 174, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/174/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/174/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/174/loser.jpg", "save_id": 133, "prompt_en": "Remove the rainbow from the sky.", "prompt_cn": "移除天空中的彩虹。", "label": "preference", "dimension": 
"IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 175, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/66.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/175/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/175/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/175/loser.jpg", "save_id": 147, "prompt_en": "Remove the letter in the center of the image.", "prompt_cn": "移除图片中间的信。 ", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 176, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/269.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/176/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/176/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/176/loser.jpg", "save_id": 162, "prompt_en": "Remove the toilet from the image.", "prompt_cn": "移除图像中的马桶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 177, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/177/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/177/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/177/loser.jpg", "save_id": 184, "prompt_en": "Remove all the blue cars.", "prompt_cn": "移除所有蓝色的汽车。", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 178, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/178/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/178/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/178/loser.jpg", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 179, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/179/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/179/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/179/loser.jpg", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 180, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/180/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/180/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/180/loser.jpg", "save_id": 186, "prompt_en": "Remove all leather furniture.", "prompt_cn": "移除所有皮革家具。", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 181, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/181/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/181/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/181/loser.jpg", "save_id": 186, "prompt_en": "Remove all leather furniture.", "prompt_cn": "移除所有皮革家具。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 182, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/182/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/182/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/182/loser.jpg", "save_id": 188, "prompt_en": "Remove all black items.", "prompt_cn": "从图像中移除所有黑色物品。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 183, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/183/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/183/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/183/loser.jpg", "save_id": 189, "prompt_en": "Remove the broken cookies.", "prompt_cn": "移除破碎的饼干。", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 184, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/184/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/184/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/184/loser.jpg", "save_id": 190, "prompt_en": "Remove all spherical objects.", "prompt_cn": "移除所有球形物体。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 185, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/185/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/185/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/185/loser.jpg", "save_id": 192, "prompt_en": "Remove all electronic devices.", "prompt_cn": "移除所有电子设备。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 186, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/186/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/186/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/186/loser.jpg", "save_id": 192, "prompt_en": "Remove all electronic devices.", "prompt_cn": "移除所有电子设备。", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 187, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/187/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/187/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/187/loser.jpg", "save_id": 193, "prompt_en": "Remove all animals from the room.", "prompt_cn": "将房间中的所有动物移除。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 188, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/188/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/188/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/188/loser.jpg", "save_id": 193, "prompt_en": "Remove all animals from the room.", "prompt_cn": "将房间中的所有动物移除。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 189, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/189/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/189/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/189/loser.jpg", "save_id": 196, "prompt_en": "Remove the armchair on the left side.", "prompt_cn": "删除图像左侧的扶手椅。", "label": "preference", 
"dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 190, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/190/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/190/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/190/loser.jpg", "save_id": 196, "prompt_en": "Remove the armchair on the left side.", "prompt_cn": "删除图像左侧的扶手椅。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 191, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/191/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/191/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/191/loser.jpg", "save_id": 210, "prompt_en": "Remove the blue sedan parked in front of the pharmacy.", "prompt_cn": "移除停在药店门口的蓝色轿车。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 192, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/19.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/192/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/192/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/192/loser.jpg", "save_id": 213, "prompt_en": "Remove the tennis ball near the racket.", 
"prompt_cn": "删除在球拍旁边的那个网球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 193, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/113.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/193/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/193/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/193/loser.jpg", "save_id": 236, "prompt_en": "Replace the pitcher on the windowsill with a desk lamp.", "prompt_cn": "将窗台上的陶壶替换成一盏台灯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 194, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/113.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/194/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/194/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/194/loser.jpg", "save_id": 236, "prompt_en": "Replace the pitcher on the windowsill with a desk lamp.", "prompt_cn": "将窗台上的陶壶替换成一盏台灯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 195, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/113.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/195/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/195/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/195/loser.jpg", "save_id": 236, 
"prompt_en": "Replace the pitcher on the windowsill with a desk lamp.", "prompt_cn": "将窗台上的陶壶替换成一盏台灯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 196, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/116.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/196/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/196/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/196/loser.jpg", "save_id": 239, "prompt_en": "Swap the red heart in the bear’s hands for a flower.", "prompt_cn": "将小熊手里的红色爱心换成一朵花", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 197, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/125.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/197/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/197/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/197/loser.jpg", "save_id": 243, "prompt_en": "Replace the cylindrical streetlight on the left side of the road with a glowing golden trash can.", "prompt_cn": "把道路左边的圆柱路灯换成一个金色发光的垃圾桶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 198, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/134.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/198/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/198/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/198/loser.jpg", "save_id": 247, "prompt_en": "Turn the cilantro sprig in the upper right of the plate into a small edible purple orchid.", "prompt_cn": "把盘子右上角的香菜叶变成一朵小巧的可食用紫色兰花", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 199, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/135.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/199/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/199/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/199/loser.jpg", "save_id": 248, "prompt_en": "Replace the wall clock with a large round mirror.", "prompt_cn": "将挂钟替换为一面大圆镜子。\n", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 200, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/98.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/200/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/200/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/200/loser.jpg", "save_id": 624, "prompt_en": "Transform the image into a minimalist flat illustration.", "prompt_cn": "将图像转换为极简扁平插画风格。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 201, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/129.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/201/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/201/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/201/loser.jpg", "save_id": 637, "prompt_en": "Transform the image into a realistic style.", "prompt_cn": "将图像转化为写实风格。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 202, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_cn/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/202/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/202/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/202/loser.jpg", "save_id": 1606, "prompt_en": "Erase the red dripping Chinese title characters “背后有人” from the top center of the poster.", "prompt_cn": "擦除顶部中央滴血效果的红色中文标题“背后有人”文字", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 204, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_cn/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/204/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/204/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/204/loser.jpg", "save_id": 1614, "prompt_en": "Add red text reading “守护和平” in the center of the sky at the top of the image, using the same font style as the text “长津湖” in the image.", "prompt_cn": "在画面上方的天空中央添加红色文字“守护和平”,字体风格与图中文字“长津湖”保持一致。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 205, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_cn/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/205/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/205/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/205/loser.jpg", "save_id": 1615, "prompt_en": "Change the title '长津湖' to '红海行动'", "prompt_cn": "将标题“长津湖”更改为“红海行动”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 206, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_cn/11.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/206/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/206/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/206/loser.jpg", "save_id": 1619, "prompt_en": "Change '全场五折起' to '买一送一'", "prompt_cn": "将文字“全场五折起”更改为“买一送一”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 207, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/207/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/207/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/207/loser.jpg", "save_id": 1737, "prompt_en": "Remove the large metallic silver English title text “CYBER HUNT” at the top of the poster.", "prompt_cn": "移除画面顶部巨大的银色英文标题“CYBER HUNT”文字效果", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", 
"VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 208, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/208/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/208/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/208/loser.jpg", "save_id": 1739, "prompt_en": "Erase the blood-red English title text “THE GHOST” from the upper center of the image.", "prompt_cn": "擦除画面上方中央血红色的英文标题“THE GHOST”文字", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 209, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/209/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/209/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/209/loser.jpg", "save_id": 1740, "prompt_en": "Change the title 'THE GHOST' to 'THE ALIEN'.", "prompt_cn": "将标题文字从“THE GHOST”更改为“THE ALIEN”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 210, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/210/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/210/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/210/loser.jpg", "save_id": 1749, "prompt_en": "Remove the large yellow English text and numbers “5 STARS” at the lower left corner.", 
"prompt_cn": "移除左下角大号的黄色英文数字与单词“5 STARS”", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 211, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/211/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/211/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/211/loser.jpg", "save_id": 1760, "prompt_en": "Erase the large curved “HARVEST MOON FOLK FEST” English title text at the top.", "prompt_cn": "擦除画面顶部大字的“HARVEST MOON FOLK FEST”英文标题文本", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 212, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/212/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/212/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/212/loser.jpg", "save_id": 1764, "prompt_en": "Add light-blue, handwritten-style text 'Beach' to the left of the sunglasses on the sand at the bottom of the image.", "prompt_cn": "在画面下方沙滩上太阳镜的左侧,添加浅蓝色手写风格文字‘Beach’。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 213, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/213/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/213/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/213/loser.jpg", "save_id": 1768, "prompt_en": "Change the text 'New Arrival' to 'Final Clearance'.", "prompt_cn": "将文字“New Arrival”更改为“Final Clearance”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 214, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/214/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/214/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/214/loser.jpg", "save_id": 1770, "prompt_en": "Add smaller glowing white sans-serif text \"New Generation\" above the central \"iPhone 14\" title.", "prompt_cn": "在画面顶部中央的“iPhone 14”上方添加小一号白色无衬线发光文字“New Generation”", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 215, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/215/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/215/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/215/loser.jpg", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 219, 
"sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/219/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/219/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/219/loser.jpg", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 226, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/226/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/226/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/226/loser.jpg", "save_id": 775, "prompt_en": "Turn the dining chair into a wooden one.", "prompt_cn": "将这把餐椅变成一把木制餐椅。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 227, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/227/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/227/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/227/loser.jpg", "save_id": 775, "prompt_en": "Turn the dining chair into a wooden one.", "prompt_cn": "将这把餐椅变成一把木制餐椅。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { 
"IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 228, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/228/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/228/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/228/loser.jpg", "save_id": 776, "prompt_en": "Change the pillows to linen material.", "prompt_cn": "将枕头更改为亚麻材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 231, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/231/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/231/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/231/loser.jpg", "save_id": 779, "prompt_en": "Replace the curtains with blue ones made of cotton-linen.", "prompt_cn": "把窗帘换成蓝色的棉麻材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 232, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/232/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/232/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/232/loser.jpg", "save_id": 10, "prompt_en": "Add a pink balloon to the panda’s hand on the far right.", "prompt_cn": "在最右边的熊猫手上加入一个粉色的气球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 234, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/234/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/234/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/234/loser.jpg", "save_id": 781, "prompt_en": "Change the carpet to woven velvet.", "prompt_cn": "把地毯变成编织丝绒材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 235, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/235/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/235/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/235/loser.jpg", "save_id": 781, "prompt_en": "Change the carpet to woven velvet.", "prompt_cn": "把地毯变成编织丝绒材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 236, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/236/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/236/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/236/loser.jpg", "save_id": 781, "prompt_en": "Change the carpet to woven velvet.", "prompt_cn": "把地毯变成编织丝绒材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 237, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/11.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/237/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/237/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/237/loser.jpg", "save_id": 782, "prompt_en": "Make the coffee table out of glass.", "prompt_cn": "将咖啡桌改为玻璃材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 239, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/12.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/239/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/239/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/239/loser.jpg", "save_id": 783, "prompt_en": "Change the table material to marble.", "prompt_cn": "将桌子的材质更改为大理石。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 240, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/12.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/240/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/240/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/240/loser.jpg", "save_id": 783, "prompt_en": "Change the table material to marble.", "prompt_cn": "将桌子的材质更改为大理石。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 242, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/242/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/242/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/242/loser.jpg", "save_id": 784, "prompt_en": "Replace the dining table material with walnut wood.", "prompt_cn": "将餐桌的材质改为胡桃木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 243, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/243/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/243/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/243/loser.jpg", "save_id": 784, "prompt_en": "Replace the dining table material with walnut wood.", "prompt_cn": "将餐桌的材质改为胡桃木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 247, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/247/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/247/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/247/loser.jpg", "save_id": 785, "prompt_en": "Change the bedside table to pine wood.", 
"prompt_cn": "将床头柜改成松木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 249, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/249/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/249/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/249/loser.jpg", "save_id": 785, "prompt_en": "Change the bedside table to pine wood.", "prompt_cn": "将床头柜改成松木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 252, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/252/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/252/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/252/loser.jpg", "save_id": 785, "prompt_en": "Change the bedside table to pine wood.", "prompt_cn": "将床头柜改成松木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 253, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/253/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/253/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/253/loser.jpg", "save_id": 786, "prompt_en": "Turn the table 
into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 254, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/254/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/254/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/254/loser.jpg", "save_id": 786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 255, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/255/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/255/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/255/loser.jpg", "save_id": 786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 256, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/256/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/256/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/256/loser.jpg", "save_id": 786, 
"prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 257, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/257/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/257/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/257/loser.jpg", "save_id": 786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 259, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/259/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/259/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/259/loser.jpg", "save_id": 788, "prompt_en": "Make the side table ceramic.", "prompt_cn": "将茶几的材质更改为陶瓷。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 260, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/260/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/260/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/260/loser.jpg", 
"save_id": 788, "prompt_en": "Make the side table ceramic.", "prompt_cn": "将茶几的材质更改为陶瓷。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 262, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/262/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/262/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/262/loser.jpg", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 263, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/263/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/263/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/263/loser.jpg", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 265, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/265/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/265/winner.jpg", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/265/loser.jpg", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 269, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/269/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/269/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/269/loser.jpg", "save_id": 791, "prompt_en": "Change the material of this jacket to denim.", "prompt_cn": "将这件夹克的材质改为牛仔材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 272, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/272/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/272/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/272/loser.jpg", "save_id": 792, "prompt_en": "Change the coat material to leather.", "prompt_cn": "将外套的材质改为皮质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 273, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/29.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/273/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/273/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/273/loser.jpg", "save_id": 794, "prompt_en": "Make the pants denim jeans.", "prompt_cn": "将这条裤子改成牛仔裤。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 278, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/278/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/278/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/278/loser.jpg", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 279, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/279/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/279/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/279/loser.jpg", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 280, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/280/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/280/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/280/loser.jpg", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 281, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/281/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/281/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/281/loser.jpg", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 284, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/21.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/284/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/284/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/284/loser.jpg", "save_id": 21, "prompt_en": "Place a small coffee table on the rug in front of the sofa.", "prompt_cn": "在沙发前面的地毯上放一个小茶几。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 285, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/21.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/285/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/285/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/285/loser.jpg", "save_id": 21, "prompt_en": "Place a small coffee table on the rug in front of the sofa.", "prompt_cn": "在沙发前面的地毯上放一个小茶几。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 286, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/286/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/286/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/286/loser.jpg", "save_id": 24, "prompt_en": "Hang a painting on the wall between the two central windows", "prompt_cn": "在两扇窗之间的墙上挂一幅画", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 287, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/287/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/287/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/287/loser.jpg", "save_id": 24, "prompt_en": "Hang a painting on the wall between the two central windows", "prompt_cn": "在两扇窗之间的墙上挂一幅画", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 289, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/289/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/289/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/289/loser.jpg", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 290, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/290/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/290/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/290/loser.jpg", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 291, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/291/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/291/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/291/loser.jpg", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 292, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/292/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/292/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/292/loser.jpg", "save_id": 28, "prompt_en": "Add an ice bucket for champagne on the sand beside the left chairs", "prompt_cn": "在左侧椅子旁的沙滩上放一个香槟桶", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 294, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/294/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/294/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/294/loser.jpg", "save_id": 28, "prompt_en": "Add an ice bucket for champagne on the sand beside the left chairs", "prompt_cn": "在左侧椅子旁的沙滩上放一个香槟桶", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 295, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/295/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/295/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/295/loser.jpg", "save_id": 29, "prompt_en": "Add a picnic basket on the sand in front left of the table", "prompt_cn": 
"在桌子左前方的沙滩上添加一个小野餐篮", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 296, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/296/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/296/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/296/loser.jpg", "save_id": 29, "prompt_en": "Add a picnic basket on the sand in front left of the table", "prompt_cn": "在桌子左前方的沙滩上添加一个小野餐篮", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 297, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/297/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/297/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/297/loser.jpg", "save_id": 34, "prompt_en": "Replace the bedside lamp on the right nightstand with an alarm clock.", "prompt_cn": "将右侧床头柜的台灯换为一个闹钟。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 298, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/298/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/298/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/298/loser.jpg", "save_id": 34, 
"prompt_en": "Replace the bedside lamp on the right nightstand with an alarm clock.", "prompt_cn": "将右侧床头柜的台灯换为一个闹钟。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 299, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/299/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/299/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/299/loser.jpg", "save_id": 122, "prompt_en": "Add another identical smartphone next to the first one.", "prompt_cn": "在第一部智能手机旁边添加另一部相同的智能手机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 300, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/300/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/300/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/300/loser.jpg", "save_id": 122, "prompt_en": "Add another identical smartphone next to the first one.", "prompt_cn": "在第一部智能手机旁边添加另一部相同的智能手机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 301, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Add_Copy/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/301/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/301/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/301/loser.jpg", "save_id": 122, "prompt_en": "Add another identical smartphone next to the first one.", "prompt_cn": "在第一部智能手机旁边添加另一部相同的智能手机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 302, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/302/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/302/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/302/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 303, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/303/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/303/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/303/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 304, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/304/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/304/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/304/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 305, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/305/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/305/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/305/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 306, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/306/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/306/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/306/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 307, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/307/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/307/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/307/loser.jpg", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 308, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/308/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/308/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/308/loser.jpg", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 309, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/309/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/309/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/309/loser.jpg", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", 
"VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 310, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/310/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/310/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/310/loser.jpg", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 311, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/311/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/311/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/311/loser.jpg", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 312, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/312/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/312/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/312/loser.jpg", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", 
"prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 313, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/5.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/313/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/313/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/313/loser.jpg", "save_id": 901, "prompt_en": "Make the boy look like he is talking on the phone.", "prompt_cn": "让男孩看起来好像正在打电话。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 314, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/314/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/314/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/314/loser.jpg", "save_id": 902, "prompt_en": "Make this Pikachu jump up to pick the fruit.", "prompt_cn": "让这只皮卡丘跳起来去摘水果。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 315, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/315/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/315/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/315/loser.jpg", "save_id": 902, "prompt_en": "Make this Pikachu jump up to pick the fruit.", "prompt_cn": "让这只皮卡丘跳起来去摘水果。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 316, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/316/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/316/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/316/loser.jpg", "save_id": 903, "prompt_en": "Have the girl who is taking the phone photo make a peace sign with her free hand.", "prompt_cn": "让正在用手机拍照的女生,用另一只空着的手做出一个“耶”的手势。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 317, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/317/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/317/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/317/loser.jpg", "save_id": 903, "prompt_en": "Have the girl who is taking the phone photo make a peace sign with her free hand.", "prompt_cn": "让正在用手机拍照的女生,用另一只空着的手做出一个“耶”的手势。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { 
"IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 318, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/318/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/318/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/318/loser.jpg", "save_id": 903, "prompt_en": "Have the girl who is taking the phone photo make a peace sign with her free hand.", "prompt_cn": "让正在用手机拍照的女生,用另一只空着的手做出一个“耶”的手势。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 319, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/319/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/319/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/319/loser.jpg", "save_id": 906, "prompt_en": "Make the girl pull the machine downward.", "prompt_cn": "让这个女孩把器械向下拉。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 320, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/320/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/320/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/320/loser.jpg", "save_id": 906, "prompt_en": "Make the girl pull the machine downward.", "prompt_cn": 
"让这个女孩把器械向下拉。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 321, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/321/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/321/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/321/loser.jpg", "save_id": 906, "prompt_en": "Make the girl pull the machine downward.", "prompt_cn": "让这个女孩把器械向下拉。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 322, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/322/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/322/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/322/loser.jpg", "save_id": 907, "prompt_en": "Have the girl grip the lat pulldown machine with her hands.", "prompt_cn": "让这个女孩用手握住高位下拉的器械。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 323, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/323/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/323/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/323/loser.jpg", "save_id": 907, "prompt_en": "Have the girl grip the lat pulldown machine with her hands.", "prompt_cn": "让这个女孩用手握住高位下拉的器械。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 324, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/324/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/324/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/324/loser.jpg", "save_id": 907, "prompt_en": "Have the girl grip the lat pulldown machine with her hands.", "prompt_cn": "让这个女孩用手握住高位下拉的器械。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 325, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/325/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/325/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/325/loser.jpg", "save_id": 908, "prompt_en": "Have the boy lie down on the bench he is currently sitting on.", "prompt_cn": "让这个男孩躺在现在做的凳子上", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 326, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/326/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/326/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/326/loser.jpg", "save_id": 908, "prompt_en": "Have the boy lie down on the bench he is currently sitting on.", "prompt_cn": "让这个男孩躺在现在做的凳子上", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 327, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/327/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/327/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/327/loser.jpg", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 328, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/328/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/328/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/328/loser.jpg", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 329, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/329/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/329/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/329/loser.jpg", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 330, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/330/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/330/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/330/loser.jpg", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 331, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/331/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/331/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/331/loser.jpg", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 332, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/332/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/332/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/332/loser.jpg", "save_id": 910, "prompt_en": "Change the girl’s pose to a horse stance.", "prompt_cn": "将女孩的姿势改为马步。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 333, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/333/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/333/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/333/loser.jpg", "save_id": 910, "prompt_en": "Change the girl’s pose to a horse stance.", "prompt_cn": "将女孩的姿势改为马步。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 334, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part2/Action/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/334/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/334/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/334/loser.jpg", "save_id": 910, "prompt_en": "Change the girl’s pose to a horse stance.", "prompt_cn": "将女孩的姿势改为马步。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 335, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/335/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/335/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/335/loser.jpg", "save_id": 910, "prompt_en": "Change the girl’s pose to a horse stance.", "prompt_cn": "将女孩的姿势改为马步。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 336, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/336/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/336/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/336/loser.jpg", "save_id": 910, "prompt_en": "Change the girl’s pose to a horse stance.", "prompt_cn": "将女孩的姿势改为马步。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 337, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Casual_Reason/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/337/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/337/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/337/loser.jpg", "save_id": 1228, "prompt_en": "Remove the toys from the box.", "prompt_cn": "将玩具从盒子中移除。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 338, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/81.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/338/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/338/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/338/loser.jpg", "save_id": 829, "prompt_en": "Keep the dog unchanged and change the background to the interior of a library filled with bookshelves.", "prompt_cn": "保持狗不变,将背景改为书架林立的图书馆内部。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 339, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/167.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/339/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/339/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/339/loser.jpg", "save_id": 844, "prompt_en": "Keep the two people unchanged, and change the background to a futuristic sci‑fi city with hovering vehicles 
and tall glass towers.", "prompt_cn": "保持两个人不变,将背景改为未来科幻城市,有悬浮车辆和高耸的玻璃塔楼。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 340, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/167.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/340/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/340/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/340/loser.jpg", "save_id": 844, "prompt_en": "Keep the two people unchanged, and change the background to a futuristic sci‑fi city with hovering vehicles and tall glass towers.", "prompt_cn": "保持两个人不变,将背景改为未来科幻城市,有悬浮车辆和高耸的玻璃塔楼。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 341, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/167.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/341/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/341/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/341/loser.jpg", "save_id": 844, "prompt_en": "Keep the two people unchanged, and change the background to a futuristic sci‑fi city with hovering vehicles and tall glass towers.", "prompt_cn": "保持两个人不变,将背景改为未来科幻城市,有悬浮车辆和高耸的玻璃塔楼。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 342, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/204.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/342/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/342/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/342/loser.jpg", "save_id": 846, "prompt_en": "Keep the girl unchanged and change the background to a desert with golden sand dunes covering the ground.", "prompt_cn": "保持女子不变,将背景改为沙漠,地面覆盖着金黄色的沙丘。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 343, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/210.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/343/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/343/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/343/loser.jpg", "save_id": 847, "prompt_en": "Keep the man and the car unchanged, and change the background to a desert highway with sand dunes and a clear blue sky.", "prompt_cn": "保持人物和汽车不变,将背景改为沙丘起伏、蓝天晴朗的沙漠公路。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 344, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/290.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/344/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/344/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/344/loser.jpg", "save_id": 849, "prompt_en": "Keep the shopping cart unchanged, and change the background to the interior of a supermarket.", "prompt_cn": "保持购物车不变,将背景更换为超市内部场景。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", 
"VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 345, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/290.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/345/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/345/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/345/loser.jpg", "save_id": 849, "prompt_en": "Keep the shopping cart unchanged, and change the background to the interior of a supermarket.", "prompt_cn": "保持购物车不变,将背景更换为超市内部场景。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 346, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/292.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/346/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/346/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/346/loser.jpg", "save_id": 850, "prompt_en": "Keep the sleigh and gingerbread unchanged, and change the background to a snowy landscape.", "prompt_cn": "雪橇与姜饼保持不变,背景替换为冬季雪地。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 347, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/299.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/347/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/347/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/347/loser.jpg", "save_id": 851, "prompt_en": "Keep the astronaut and the red 
terrain unchanged, and change the background to a blue sky.", "prompt_cn": "保持宇航员和红色地貌不动,将背景改为蓝色的天空。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 348, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/348/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/348/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/348/loser.jpg", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 349, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/349/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/349/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/349/loser.jpg", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 350, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/350/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/350/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/350/loser.jpg", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 351, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/351/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/351/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/351/loser.jpg", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 352, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/352/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/352/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/352/loser.jpg", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 353, "sampling_model": 
"Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/353/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/353/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/353/loser.jpg", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 354, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/354/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/354/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/354/loser.jpg", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 355, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/355/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/355/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/355/loser.jpg", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 356, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/202.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/356/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/356/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/356/loser.jpg", "save_id": 566, "prompt_en": "Adjust the girl's height to 4/5 of her current height.", "prompt_cn": "将这个女生的身高调整为当前的4/5。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 357, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/202.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/357/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/357/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/357/loser.jpg", "save_id": 566, "prompt_en": "Adjust the girl's height to 4/5 of her current height.", "prompt_cn": "将这个女生的身高调整为当前的4/5。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 358, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/202.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/358/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/358/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/358/loser.jpg", "save_id": 566, "prompt_en": "Adjust the girl's height to 4/5 of her current height.", "prompt_cn": "将这个女生的身高调整为当前的4/5。", "label": "preference", "dimension": 
"VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 359, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/241.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/359/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/359/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/359/loser.jpg", "save_id": 570, "prompt_en": "Change the black sedan on the right to a bright solid sky blue.", "prompt_cn": "将右侧黑色轿车车身改成亮丽的天蓝色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 360, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/200.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/360/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/360/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/360/loser.jpg", "save_id": 299, "prompt_en": "Remove the chair on the left, hang a black suit jacket on the chair on the right, replace the flowers in the vase with roses, and remove the fruits and fruit plate from the image.", "prompt_cn": "移除左侧的椅子,在右侧的椅子上挂一件黑色西装外套,将花瓶里的花替换为玫瑰花,移除图像中的水果和水果盘。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 361, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/200.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/361/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/361/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/361/loser.jpg", "save_id": 299, "prompt_en": "Remove the chair on the left, hang a black suit jacket on the chair on the right, replace the flowers in the vase with roses, and remove the fruits and fruit plate from the image.", "prompt_cn": "移除左侧的椅子,在右侧的椅子上挂一件黑色西装外套,将花瓶里的花替换为玫瑰花,移除图像中的水果和水果盘。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 362, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/201.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/362/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/362/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/362/loser.jpg", "save_id": 300, "prompt_en": "Remove the 'Stop' sign and the pink helmet, and add a skateboard on the road.", "prompt_cn": "移除‘Stop’的标志和粉色的头盔,在路上加入一个滑板。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 363, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/201.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/363/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/363/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/363/loser.jpg", "save_id": 300, "prompt_en": "Remove the 'Stop' sign and the pink helmet, and add a skateboard on the road.", "prompt_cn": "移除‘Stop’的标志和粉色的头盔,在路上加入一个滑板。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 364, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/303.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/364/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/364/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/364/loser.jpg", "save_id": 306, "prompt_en": "Have the girl pick up the water bottle in the center and drink from it, change the pants to the same color as the top, and add a white towel around her neck.", "prompt_cn": "让这个女生拿起中间的水瓶喝水,将裤子改为与上衣相同的颜色,并在脖子上挂一条白色毛巾。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 365, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/311.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/365/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/365/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/365/loser.jpg", "save_id": 312, "prompt_en": "Remove the small potted plant from the table, change the chandelier color to warm yellow, remove the blue plastic box, and add a wooden sign on the wall with the text '努力学习' in a warm, cute handwritten style.", "prompt_cn": "移除桌上的小盆栽,将吊灯的颜色改为暖黄色,移除蓝色塑料箱,并在墙上添加一块写有‘努力学习’的木质牌子,字体为温暖可爱的手写风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 366, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/4.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/366/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/366/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/366/loser.jpg", "save_id": 1181, "prompt_en": "Make the boy sink into deep thought.", "prompt_cn": "让这个男孩看起来正在深思。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 367, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/367/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/367/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/367/loser.jpg", "save_id": 1191, "prompt_en": "Make the baby about to cry.", "prompt_cn": "让这名婴儿看起来好像快要哭出来。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 368, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/368/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/368/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/368/loser.jpg", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 369, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/369/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/369/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/369/loser.jpg", "save_id": 698, "prompt_en": "Extract the yellow lounge chair, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出这个黄色的躺椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 370, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/370/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/370/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/370/loser.jpg", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 371, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/371/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/371/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/371/loser.jpg", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 372, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/372/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/372/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/372/loser.jpg", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 373, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/373/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/373/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/373/loser.jpg", "save_id": 705, "prompt_en": "Extract the soccer ball from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的足球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 374, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/96.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/374/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/374/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/374/loser.jpg", "save_id": 706, "prompt_en": "Extract the TV from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的电视,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 375, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/96.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/375/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/375/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/375/loser.jpg", "save_id": 706, "prompt_en": "Extract the TV from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的电视,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 376, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/107.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/376/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/376/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/376/loser.jpg", "save_id": 710, "prompt_en": "Extract the child and the small dog they are walking, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的小孩以及牵着的小狗,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 377, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/120.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/377/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/377/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/377/loser.jpg", "save_id": 713, "prompt_en": "Extract the sofa from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的沙发,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 378, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/123.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/378/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/378/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/378/loser.jpg", "save_id": 714, "prompt_en": "Extract only the plate with the three lollipops, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的餐盘和三根棒棒糖,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 379, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/379/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/379/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/379/loser.jpg", "save_id": 968, "prompt_en": "Move the single cherry tomato onto the spoon.", "prompt_cn": "将那颗单独的樱桃番茄移到勺子上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 380, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/380/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/380/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/380/loser.jpg", "save_id": 968, "prompt_en": "Move the single cherry tomato onto the spoon.", "prompt_cn": "将那颗单独的樱桃番茄移到勺子上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 381, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/381/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/381/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/381/loser.jpg", "save_id": 970, "prompt_en": "Move the chair to the right side of the sofa.", 
"prompt_cn": "将椅子移动到沙发的右侧。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 382, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/382/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/382/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/382/loser.jpg", "save_id": 971, "prompt_en": "Move the robot to the left.", "prompt_cn": "将机器人移动到左侧。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 383, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/383/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/383/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/383/loser.jpg", "save_id": 980, "prompt_en": "Move the painting to the right.", "prompt_cn": "将这幅画向右移动。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 384, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/384/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/384/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/384/loser.jpg", "save_id": 980, "prompt_en": "Move the painting to the right.", "prompt_cn": "将这幅画向右移动。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 385, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/385/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/385/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/385/loser.jpg", "save_id": 983, "prompt_en": "Move the wall clock to the left of the television.", "prompt_cn": "将墙上的时钟移动到电视的左侧。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 386, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/386/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/386/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/386/loser.jpg", "save_id": 985, "prompt_en": "Move the cat onto the sofa.", "prompt_cn": "将猫移动到沙发上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 387, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/24.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/387/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/387/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/387/loser.jpg", "save_id": 987, "prompt_en": "Place the pen on the left onto the coffee cup on the right.", "prompt_cn": "把左边的笔放在右边的咖啡杯上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 388, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Multi_Image_Aware/1/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/388/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/388/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/388/loser.jpg", "save_id": 2884, "prompt_en": "Adjust the color and shine of the green apple in the fruit bowl in image 1 to match the apple in image 2.", "prompt_cn": "调整第一张图像中水果盘中青苹果的颜色和光泽,使其与第二张图像中的苹果一致。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/388/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 389, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Multi_Image_Aware/5/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/389/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/389/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/389/loser.jpg", "save_id": 2888, "prompt_en": "Adjust the material of the ring on the hand in image 1 to match the material of the 
bracelet in image 2.", "prompt_cn": "将第一张图像中手上的戒指的材质和第二张图像中镯子的材质保持一致。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/389/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 390, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Multi_Image_Aware/6/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/390/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/390/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/390/loser.jpg", "save_id": 2889, "prompt_en": "Align the balloons that share the same color in image 1 with the state of the balloons in image 2.", "prompt_cn": "将第一张图像和第二张图像中颜色一样的气球变为相同的状态。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/390/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 391, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Multi_Image_Aware/13/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/391/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/391/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/391/loser.jpg", "save_id": 2896, "prompt_en": "Remove the objects in image 1 that serve the same function as those in image 2.", "prompt_cn": "将第一张图像与第二张图像中物体功能相同的物体移除", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/391/ref_1.png" ], 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 392, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/392/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/392/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/392/loser.jpg", "save_id": 1110, "prompt_en": "Make the man and woman hug each other tightly.", "prompt_cn": "让这名男子和女子紧紧拥抱在一起。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 393, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/393/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/393/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/393/loser.jpg", "save_id": 1110, "prompt_en": "Make the man and woman hug each other tightly.", "prompt_cn": "让这名男子和女子紧紧拥抱在一起。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 394, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/394/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/394/winner.jpg", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/394/loser.jpg", "save_id": 1112, "prompt_en": "Make the boy and girl hold hands.", "prompt_cn": "让这个男孩和这个女孩牵着手。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 395, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/395/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/395/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/395/loser.jpg", "save_id": 1112, "prompt_en": "Make the boy and girl hold hands.", "prompt_cn": "让这个男孩和这个女孩牵着手。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 396, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/396/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/396/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/396/loser.jpg", "save_id": 1114, "prompt_en": "Make the two boys bump fists.", "prompt_cn": "让两个男生正在进行碰拳的动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 397, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part2/Obj_interaction/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/397/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/397/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/397/loser.jpg", "save_id": 1114, "prompt_en": "Make the two boys bump fists.", "prompt_cn": "让两个男生正在进行碰拳的动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 398, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/398/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/398/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/398/loser.jpg", "save_id": 1117, "prompt_en": "Have the older brother lift up his younger sister.", "prompt_cn": "让哥哥把妹妹抱起来。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 399, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/399/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/399/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/399/loser.jpg", "save_id": 1117, "prompt_en": "Have the older brother lift up his younger sister.", "prompt_cn": "让哥哥把妹妹抱起来。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 400, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/400/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/400/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/400/loser.jpg", "save_id": 1123, "prompt_en": "Make the boy kick the soccer ball.", "prompt_cn": "让这个男孩看起来正在踢足球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 401, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/401/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/401/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/401/loser.jpg", "save_id": 1124, "prompt_en": "Have the girl in the image pick up the tennis racket.", "prompt_cn": "让图中的女生捡起来网球拍。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 402, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/402/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/402/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/402/loser.jpg", "save_id": 1125, "prompt_en": "Make the player holding the 
volleyball appear to be serving the ball.", "prompt_cn": "让拿着排球的这名球员看起来正在发排球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 403, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/403/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/403/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/403/loser.jpg", "save_id": 1125, "prompt_en": "Make the player holding the volleyball appear to be serving the ball.", "prompt_cn": "让拿着排球的这名球员看起来正在发排球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 404, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/404/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/404/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/404/loser.jpg", "save_id": 1125, "prompt_en": "Make the player holding the volleyball appear to be serving the ball.", "prompt_cn": "让拿着排球的这名球员看起来正在发排球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 405, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/3.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/405/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/405/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/405/loser.jpg", "save_id": 132, "prompt_en": "Remove the blue overhead traffic sign from the image.", "prompt_cn": "移除蓝色的交通指路牌。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 406, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/406/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/406/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/406/loser.jpg", "save_id": 133, "prompt_en": "Remove the rainbow from the sky.", "prompt_cn": "移除天空中的彩虹。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 407, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/407/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/407/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/407/loser.jpg", "save_id": 136, "prompt_en": "Remove the magnifying glass from the image.", "prompt_cn": "移除图中的放大镜 ", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 408, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/408/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/408/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/408/loser.jpg", "save_id": 136, "prompt_en": "Remove the magnifying glass from the image.", "prompt_cn": "移除图中的放大镜 ", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 409, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/409/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/409/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/409/loser.jpg", "save_id": 137, "prompt_en": "Remove the green plant on the cabinet.", "prompt_cn": "移除柜子上的绿色植物", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 410, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/410/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/410/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/410/loser.jpg", "save_id": 138, "prompt_en": "Remove the robot figure on the right.", "prompt_cn": "移除右边的机器人公仔", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 411, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/411/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/411/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/411/loser.jpg", "save_id": 140, "prompt_en": "Remove the heart from the image.", "prompt_cn": "移除图像中的爱心。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 412, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/412/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/412/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/412/loser.jpg", "save_id": 140, "prompt_en": "Remove the heart from the image.", "prompt_cn": "移除图像中的爱心。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 413, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/413/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/413/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/413/loser.jpg", "save_id": 142, "prompt_en": "Remove the umbrella in the air.", "prompt_cn": "移除空中的雨伞。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 414, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/30.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/414/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/414/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/414/loser.jpg", "save_id": 142, "prompt_en": "Remove the umbrella in the air.", "prompt_cn": "移除空中的雨伞。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 415, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/415/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/415/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/415/loser.jpg", "save_id": 142, "prompt_en": "Remove the umbrella in the air.", "prompt_cn": "移除空中的雨伞。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 416, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/36.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/416/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/416/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/416/loser.jpg", "save_id": 144, "prompt_en": "Remove the framed artwork on the wall above the bed.", "prompt_cn": "移除床头上方墙上的挂画。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 417, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/36.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/417/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/417/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/417/loser.jpg", "save_id": 144, "prompt_en": "Remove the framed artwork on the wall above the bed.", "prompt_cn": "移除床头上方墙上的挂画。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 418, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/418/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/418/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/418/loser.jpg", "save_id": 145, "prompt_en": "Remove the black headphones hanging on the wall.", "prompt_cn": "移除挂在墙上的黑色耳机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 419, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/66.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/419/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/419/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/419/loser.jpg", "save_id": 147, "prompt_en": "Remove the letter in the center of the image.", "prompt_cn": "移除图片中间的信。 ", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 420, "sampling_model": "Qwen-Image-Edit-2509", 
"source_image_ori": "Part1/image/72.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/420/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/420/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/420/loser.jpg", "save_id": 149, "prompt_en": "Remove the metal fork resting on the plate near the cake.", "prompt_cn": "移除盘子上靠近蛋糕的金属叉子", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 421, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/141.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/421/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/421/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/421/loser.jpg", "save_id": 154, "prompt_en": "Remove the curtains from the image.", "prompt_cn": "移除图中的窗帘。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 422, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/245.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/422/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/422/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/422/loser.jpg", "save_id": 155, "prompt_en": "Remove the small boat from the image.", "prompt_cn": "移除图中的小船。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 423, "sampling_model": 
"Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/257.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/423/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/423/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/423/loser.jpg", "save_id": 157, "prompt_en": "Remove the camera from the image.", "prompt_cn": "移除图中的相机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 424, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/261.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/424/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/424/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/424/loser.jpg", "save_id": 158, "prompt_en": "Remove all the popcorn pieces that are scattered on the table.", "prompt_cn": "移除桌面上所有散落的爆米花粒。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 425, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/269.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/425/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/425/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/425/loser.jpg", "save_id": 162, "prompt_en": "Remove the toilet from the image.", "prompt_cn": "移除图像中的马桶。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 426, 
"sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/426/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/426/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/426/loser.jpg", "save_id": 184, "prompt_en": "Remove all the blue cars.", "prompt_cn": "移除所有蓝色的汽车。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 427, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/427/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/427/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/427/loser.jpg", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 428, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/428/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/428/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/428/loser.jpg", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 429, 
"sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/429/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/429/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/429/loser.jpg", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 430, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/430/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/430/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/430/loser.jpg", "save_id": 188, "prompt_en": "Remove all black items.", "prompt_cn": "从图像中移除所有黑色物品。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 431, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/431/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/431/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/431/loser.jpg", "save_id": 189, "prompt_en": "Remove the broken cookies.", "prompt_cn": "移除破碎的饼干。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 432, 
"sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/432/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/432/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/432/loser.jpg", "save_id": 189, "prompt_en": "Remove the broken cookies.", "prompt_cn": "移除破碎的饼干。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 433, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/433/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/433/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/433/loser.jpg", "save_id": 189, "prompt_en": "Remove the broken cookies.", "prompt_cn": "移除破碎的饼干。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 434, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/434/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/434/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/434/loser.jpg", "save_id": 190, "prompt_en": "Remove all spherical objects.", "prompt_cn": "移除所有球形物体。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 435, 
"sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/435/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/435/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/435/loser.jpg", "save_id": 190, "prompt_en": "Remove all spherical objects.", "prompt_cn": "移除所有球形物体。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 436, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/436/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/436/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/436/loser.jpg", "save_id": 190, "prompt_en": "Remove all spherical objects.", "prompt_cn": "移除所有球形物体。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 437, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/437/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/437/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/437/loser.jpg", "save_id": 192, "prompt_en": "Remove all electronic devices.", "prompt_cn": "移除所有电子设备。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 438, 
"sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/438/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/438/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/438/loser.jpg", "save_id": 192, "prompt_en": "Remove all electronic devices.", "prompt_cn": "移除所有电子设备。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 439, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/439/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/439/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/439/loser.jpg", "save_id": 192, "prompt_en": "Remove all electronic devices.", "prompt_cn": "移除所有电子设备。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 440, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/440/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/440/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/440/loser.jpg", "save_id": 193, "prompt_en": "Remove all animals from the room.", "prompt_cn": "将房间中的所有动物移除。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 441, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/17.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/441/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/441/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/441/loser.jpg", "save_id": 195, "prompt_en": "Remove all vehicles from the street.", "prompt_cn": "从街道上移除所有车辆。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 442, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/442/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/442/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/442/loser.jpg", "save_id": 196, "prompt_en": "Remove the armchair on the left side.", "prompt_cn": "删除图像左侧的扶手椅。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 443, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/3.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/443/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/443/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/443/loser.jpg", "save_id": 198, "prompt_en": "Remove the plant on the far right.", "prompt_cn": "移除最右侧的那盆植物。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" 
}, "hints": "" }, { "idx": 444, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/444/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/444/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/444/loser.jpg", "save_id": 199, "prompt_en": "Remove the top book from the stack on the right side of the image.", "prompt_cn": "移除右边最上面的那本书。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 445, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/445/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/445/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/445/loser.jpg", "save_id": 199, "prompt_en": "Remove the top book from the stack on the right side of the image.", "prompt_cn": "移除右边最上面的那本书。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 446, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/446/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/446/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/446/loser.jpg", "save_id": 199, "prompt_en": "Remove the top book from the stack on the right side of the image.", "prompt_cn": "移除右边最上面的那本书。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 447, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/447/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/447/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/447/loser.jpg", "save_id": 201, "prompt_en": "Remove the vehicle that is farther away from the camera.", "prompt_cn": "移除离镜头更远的车辆。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 448, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/448/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/448/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/448/loser.jpg", "save_id": 202, "prompt_en": "Remove the pillow on the right.", "prompt_cn": "移除右侧的枕头。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 449, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/449/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/449/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/449/loser.jpg", "save_id": 209, "prompt_en": "Remove the dog closest to the fire hydrant.", "prompt_cn": "移除距离消防栓最近的那只狗。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 450, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/450/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/450/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/450/loser.jpg", "save_id": 209, "prompt_en": "Remove the dog closest to the fire hydrant.", "prompt_cn": "移除距离消防栓最近的那只狗。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 451, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/451/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/451/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/451/loser.jpg", "save_id": 209, "prompt_en": "Remove the dog closest to the fire hydrant.", "prompt_cn": "移除距离消防栓最近的那只狗。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 452, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/452/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/452/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/452/loser.jpg", "save_id": 209, "prompt_en": "Remove the dog closest to the fire hydrant.", "prompt_cn": 
"移除距离消防栓最近的那只狗。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 453, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/453/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/453/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/453/loser.jpg", "save_id": 209, "prompt_en": "Remove the dog closest to the fire hydrant.", "prompt_cn": "移除距离消防栓最近的那只狗。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 454, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/18.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/454/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/454/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/454/loser.jpg", "save_id": 212, "prompt_en": "Remove the toy car near the dollhouse.", "prompt_cn": "移除靠近玩偶屋的那辆玩具汽车。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 455, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/18.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/455/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/455/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/455/loser.jpg", "save_id": 212, "prompt_en": "Remove the 
toy car near the dollhouse.", "prompt_cn": "移除靠近玩偶屋的那辆玩具汽车。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 456, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/18.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/456/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/456/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/456/loser.jpg", "save_id": 212, "prompt_en": "Remove the toy car near the dollhouse.", "prompt_cn": "移除靠近玩偶屋的那辆玩具汽车。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 457, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/18.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/457/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/457/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/457/loser.jpg", "save_id": 212, "prompt_en": "Remove the toy car near the dollhouse.", "prompt_cn": "移除靠近玩偶屋的那辆玩具汽车。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 458, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/18.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/458/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/458/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/458/loser.jpg", 
"save_id": 212, "prompt_en": "Remove the toy car near the dollhouse.", "prompt_cn": "移除靠近玩偶屋的那辆玩具汽车。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 459, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/20.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/459/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/459/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/459/loser.jpg", "save_id": 214, "prompt_en": "Remove the cat next to the ball of yarn.", "prompt_cn": "移除靠近毛线球的那只猫。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 460, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/42.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/460/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/460/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/460/loser.jpg", "save_id": 226, "prompt_en": "Replace the pink knitted baby booties with a pink knitted baby hat featuring a large bow or a pom-pom.", "prompt_cn": "将粉色的针织婴儿鞋改为一顶带有大蝴蝶结或绒球的粉色针织婴儿帽 ", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 461, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/93.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/461/source.jpg", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/461/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/461/loser.jpg", "save_id": 230, "prompt_en": "Replace the trash can closest to the camera on the right side of the sidewalk with a vending machine.", "prompt_cn": "把道路右边的人行道离镜头最近的垃圾桶替换成一个自动售货机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 462, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/95.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/462/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/462/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/462/loser.jpg", "save_id": 231, "prompt_en": "Replace the mug with a DSLR camera.", "prompt_cn": "将杯子替换为一台单反相机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 463, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/97.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/463/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/463/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/463/loser.jpg", "save_id": 232, "prompt_en": "Replace the left painting with a world map.", "prompt_cn": "将左侧画作换成一幅世界地图。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 464, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/116.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/464/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/464/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/464/loser.jpg", "save_id": 239, "prompt_en": "Swap the red heart in the bear’s hands for a flower.", "prompt_cn": "将小熊手里的红色爱心换成一朵花", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 465, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/116.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/465/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/465/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/465/loser.jpg", "save_id": 239, "prompt_en": "Swap the red heart in the bear’s hands for a flower.", "prompt_cn": "将小熊手里的红色爱心换成一朵花", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 466, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/116.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/466/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/466/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/466/loser.jpg", "save_id": 239, "prompt_en": "Swap the red heart in the bear’s hands for a flower.", "prompt_cn": "将小熊手里的红色爱心换成一朵花", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 467, "sampling_model": 
"Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/135.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/467/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/467/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/467/loser.jpg", "save_id": 248, "prompt_en": "Replace the wall clock with a large round mirror.", "prompt_cn": "将挂钟替换为一面大圆镜子。\n", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 468, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/73.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/468/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/468/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/468/loser.jpg", "save_id": 616, "prompt_en": "Convert the scene into a vintage 1970s film photo with grain and warm tones.", "prompt_cn": "将场景转换为带颗粒感和暖色调的 1970 年代复古胶片照片风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 469, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/73.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/469/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/469/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/469/loser.jpg", "save_id": 616, "prompt_en": "Convert the scene into a vintage 1970s film photo with grain and warm tones.", "prompt_cn": "将场景转换为带颗粒感和暖色调的 1970 年代复古胶片照片风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": 
"" }, { "idx": 470, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/93.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/470/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/470/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/470/loser.jpg", "save_id": 621, "prompt_en": "Transform the image into a Cubism style.", "prompt_cn": "将图像转化为立体主义风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 471, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/93.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/471/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/471/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/471/loser.jpg", "save_id": 621, "prompt_en": "Transform the image into a Cubism style.", "prompt_cn": "将图像转化为立体主义风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 472, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/128.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/472/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/472/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/472/loser.jpg", "save_id": 636, "prompt_en": "Convert the entire image into a low‑poly geometric illustration style.", "prompt_cn": "将整张图像转换为低多边形立体插画风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 473, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part2/Swap/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/473/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/473/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/473/loser.jpg", "save_id": 1037, "prompt_en": "Swap the positions of the cup and the spoon.", "prompt_cn": "交换杯子和勺子的的位置。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 474, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Swap/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/474/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/474/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/474/loser.jpg", "save_id": 1037, "prompt_en": "Swap the positions of the cup and the spoon.", "prompt_cn": "交换杯子和勺子的的位置。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 475, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_cn/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/475/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/475/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/475/loser.jpg", "save_id": 1608, "prompt_en": "Remove the brown Chinese title text “暑期档合家欢” from the central white banner.", "prompt_cn": "移除中央白色横幅上棕色的“暑期档合家欢”中文标题文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 476, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_cn/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/476/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/476/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/476/loser.jpg", "save_id": 1614, "prompt_en": "Add red text reading “守护和平” in the center of the sky at the top of the image, using the same font style as the text “长津湖” in the image.", "prompt_cn": "在画面上方的天空中央添加红色文字“守护和平”,字体风格与图中文字“长津湖”保持一致。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 477, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/477/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/477/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/477/loser.jpg", "save_id": 1737, "prompt_en": "Remove the large metallic silver English title text “CYBER HUNT” at the top of the poster.", "prompt_cn": "移除画面顶部巨大的银色英文标题“CYBER HUNT”文字效果", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 478, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/478/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/478/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/478/loser.jpg", "save_id": 1740, "prompt_en": "Change the title 
'THE GHOST' to 'THE ALIEN'.", "prompt_cn": "将标题文字从“THE GHOST”更改为“THE ALIEN”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 479, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/479/source.jpg", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/479/winner.jpg", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/479/loser.jpg", "save_id": 1750, "prompt_en": "Remove the large yellow English title text “THE GRIP OF TERROR!” in the lower right.", "prompt_cn": "删除右下角大号黄色英文标题“THE GRIP OF TERROR!”文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 480, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/480/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/1/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/1/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/480/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/480/tie_2.png", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 481, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/481/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/3/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/3/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/481/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/481/tie_2.png", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 482, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/482/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/4/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/4/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/482/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/482/tie_2.png", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 483, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/483/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/4/14.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/4/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/483/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/483/loser.png", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 484, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/484/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/10/15.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/10/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/484/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/484/tie_2.png", "save_id": 10, "prompt_en": "Add a pink balloon to the panda’s hand on the far right.", "prompt_cn": "在最右边的熊猫手上加入一个粉色的气球。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 485, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/485/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/18/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/18/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/485/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/485/loser.png", "save_id": 18, 
"prompt_en": "Add a steaming ceramic mug of tea to the right of the book on the checkered tablecloth.", "prompt_cn": "在格子桌布上的书右边添加一个冒着热气的陶瓷茶杯。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 486, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/486/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/19/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/19/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/486/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/486/tie_2.png", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 487, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/487/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/20/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/20/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/487/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/487/loser.png", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 488, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/488/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/23/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/23/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/488/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/488/loser.png", "save_id": 23, "prompt_en": "Place a desk in front of the left window against the wall", "prompt_cn": "在左侧窗前放一张书桌", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 489, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/489/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/23/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/23/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/489/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/489/loser.png", "save_id": 23, "prompt_en": "Place a desk in front of the left window against the wall", "prompt_cn": "在左侧窗前放一张书桌", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 490, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/25.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/490/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/26/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/26/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/490/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/490/tie_2.png", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 491, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/491/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/29/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/29/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/491/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/491/tie_2.png", "save_id": 29, "prompt_en": "Add a picnic basket on the sand in front left of the table", "prompt_cn": "在桌子左前方的沙滩上添加一个小野餐篮", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 492, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/492/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/34/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/34/9.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/492/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/492/tie_2.png", "save_id": 34, "prompt_en": "Replace the bedside lamp on the right nightstand with an alarm clock.", "prompt_cn": "将右侧床头柜的台灯换为一个闹钟。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 493, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/493/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/39/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/39/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/493/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/493/tie_2.png", "save_id": 39, "prompt_en": "Place a light gray fabric cushion in the middle of the wooden bench.", "prompt_cn": "在长椅中央放一个浅灰色布艺靠垫", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 494, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/40.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/494/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/41/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/41/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/494/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/494/loser.png", "save_id": 41, "prompt_en": "Put a brown scarf on the woman holding the bag.", "prompt_cn": "让拿着包的女士戴上一条的咖色丝巾。", "label": 
"preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 495, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/40.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/495/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/41/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/41/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/495/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/495/loser.png", "save_id": 41, "prompt_en": "Put a brown scarf on the woman holding the bag.", "prompt_cn": "让拿着包的女士戴上一条的咖色丝巾。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 496, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/46.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/496/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/48/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/48/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/496/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/496/tie_2.png", "save_id": 48, "prompt_en": "Place a light gray fabric conference speaker on the right side of the table.", "prompt_cn": "在桌子的右侧放一个浅灰色布艺会议音箱。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 497, "sampling_model": 
"Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/48.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/497/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/50/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/50/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/497/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/497/loser.png", "save_id": 50, "prompt_en": "Add a boy behind this sled.", "prompt_cn": "给这个雪橇后面加入一个男生。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 498, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/54.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/498/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/55/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/55/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/498/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/498/loser.png", "save_id": 55, "prompt_en": "Add an orange hot air balloon rising with the wind in the upper left sky.", "prompt_cn": "在天空左上方加入一只橙色热气球随风上升", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 499, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/55.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/499/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/56/14.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/56/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/499/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/499/tie_2.png", "save_id": 56, "prompt_en": "Add a bag of chips to the left of the cat.", "prompt_cn": "在猫咪的左边加入一包薯片", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 500, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/500/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/60/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/60/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/500/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/500/loser.png", "save_id": 60, "prompt_en": "Place a Los Angeles Lakers LeBron James jersey on the yellow lounge chair.", "prompt_cn": "在黄色躺椅上放一件湖人队勒布朗·詹姆斯的球衣。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 501, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/68.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/501/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/68/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/68/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/501/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/501/tie_2.png", "save_id": 68, "prompt_en": "Add a pair of white Nike Air 
Force 1 sneakers on the desk.", "prompt_cn": "在桌子上加入一双 Nike Air Force 1 白色球鞋。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 502, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/68.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/502/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/68/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/68/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/502/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/502/loser.png", "save_id": 68, "prompt_en": "Add a pair of white Nike Air Force 1 sneakers on the desk.", "prompt_cn": "在桌子上加入一双 Nike Air Force 1 白色球鞋。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 503, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/102.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/503/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/74/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/74/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/503/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/503/tie_2.png", "save_id": 74, "prompt_en": "Place a silver spacecraft model on a stand on the left lawn.", "prompt_cn": "在左侧草地上放置一艘停在支架上的银色太空飞船模型", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 504, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/148.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/504/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/80/15.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/80/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/504/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/504/tie_2.png", "save_id": 80, "prompt_en": "Add a Minion to the road.", "prompt_cn": "在道路上加入一个小黄人。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 505, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/242.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/505/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/83/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/83/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/505/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/505/tie_2.png", "save_id": 83, "prompt_en": "Add a ping pong paddle on the ping pong table.", "prompt_cn": "在乒乓球桌上加入一个乒乓球拍。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 506, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/242.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/506/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/83/3.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/83/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/506/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/506/loser.png", "save_id": 83, "prompt_en": "Add a ping pong paddle on the ping pong table.", "prompt_cn": "在乒乓球桌上加入一个乒乓球拍。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 507, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/256.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/507/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/85/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/85/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/507/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/507/loser.png", "save_id": 85, "prompt_en": "Place a road sign on the grassy area to the left of the road.", "prompt_cn": "在道路旁左侧草地放置一个路牌", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 508, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/264.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/508/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/88/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/88/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/508/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/508/loser.png", "save_id": 88, "prompt_en": "Add a bright blue paint 
roller leaning against the wall.", "prompt_cn": "在墙边添加一只亮蓝色的滚筒刷,靠在墙上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 509, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/264.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/509/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/88/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/88/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/509/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/509/loser.png", "save_id": 88, "prompt_en": "Add a bright blue paint roller leaning against the wall.", "prompt_cn": "在墙边添加一只亮蓝色的滚筒刷,靠在墙上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 510, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/280.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/510/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/93/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/93/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/510/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/510/tie_2.png", "save_id": 93, "prompt_en": "Add a pink baby stroller to the bottom-right corner of the image.", "prompt_cn": "在图片的右下角加入一个粉色的婴儿车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 511, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/288.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/511/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/95/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/95/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/511/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/511/tie_2.png", "save_id": 95, "prompt_en": "Place a ship on the ocean.", "prompt_cn": "在海上加入一艘轮船。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 512, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/418.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/512/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/104/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/104/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/512/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/512/loser.png", "save_id": 104, "prompt_en": "Add a passenger airplane flying in the sky above the embankment.", "prompt_cn": "在河堤上方的天空中添加一架正在飞行的客机", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 513, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/430.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/513/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/108/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/108/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/513/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/513/tie_2.png", "save_id": 108, "prompt_en": "Add a realistic statue of Albert Einstein standing to the right of the doorway.", "prompt_cn": "在大门右侧添加一尊爱因斯坦的真实感雕像。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 514, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/448.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/514/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/110/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/110/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/514/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/514/loser.png", "save_id": 110, "prompt_en": "Add a \"Starry Night\" painting by Van Gogh on the wooden wall above the bed.", "prompt_cn": "在床头上方的木质墙面上添加一幅梵高的《星月夜》画作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 515, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/515/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/112/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/112/1.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/515/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/515/loser.png", "save_id": 112, "prompt_en": "Add another identical coffee mug next to it.", "prompt_cn": "在现有咖啡杯旁边再添加一个相同的咖啡杯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 516, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/516/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/112/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/112/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/516/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/516/loser.png", "save_id": 112, "prompt_en": "Add another identical coffee mug next to it.", "prompt_cn": "在现有咖啡杯旁边再添加一个相同的咖啡杯。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 517, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/517/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/113/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/113/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/517/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/517/loser.png", "save_id": 113, "prompt_en": "Place an identical armchair opposite the existing one.", "prompt_cn": "在现有的扶手椅对面放置一把一模一样的扶手椅。", "label": "preference", 
"dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 518, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/518/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/113/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/113/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/518/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/518/loser.png", "save_id": 113, "prompt_en": "Place an identical armchair opposite the existing one.", "prompt_cn": "在现有的扶手椅对面放置一把一模一样的扶手椅。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 519, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/519/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/113/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/113/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/519/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/519/tie_2.png", "save_id": 113, "prompt_en": "Place an identical armchair opposite the existing one.", "prompt_cn": "在现有的扶手椅对面放置一把一模一样的扶手椅。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 520, "sampling_model": 
"Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/520/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/117/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/117/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/520/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/520/loser.png", "save_id": 117, "prompt_en": "Add another identical picnic basket on the opposite corner.", "prompt_cn": "在毯子对角的另一角落添加一个相同的野餐篮。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 521, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/521/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/119/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/119/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/521/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/521/loser.png", "save_id": 119, "prompt_en": "Add an identical burger next to the existing one.", "prompt_cn": "在现在的汉堡旁边加入一个和现在汉堡一模一样的汉堡。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 522, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/522/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/119/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/119/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/522/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/522/loser.png", "save_id": 119, "prompt_en": "Add an identical burger next to the existing one.", "prompt_cn": "在现在的汉堡旁边加入一个和现在汉堡一模一样的汉堡。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 523, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/523/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/121/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/121/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/523/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/523/loser.png", "save_id": 121, "prompt_en": "Place a pillow identical to the one on the left side of the sofa in the right corner of the sofa.”", "prompt_cn": "在沙发右侧角落放置一个和沙发左侧相同的抱枕。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 524, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/524/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/122/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/122/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/524/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/524/loser.png", "save_id": 122, "prompt_en": "Add another identical smartphone next to the first one.", "prompt_cn": "在第一部智能手机旁边添加另一部相同的智能手机。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 525, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/525/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/123/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/123/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/525/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/525/loser.png", "save_id": 123, "prompt_en": "Add the matching boot next to it to complete the pair.", "prompt_cn": "在现有靴子旁边添加一只与之匹配的靴子,以组成一双。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 526, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/526/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/125/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/125/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/526/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/526/loser.png", "save_id": 125, "prompt_en": "Place another identical purple yoga mat next to it.", "prompt_cn": "在现有的紫色瑜伽垫旁边再放置一块相同的紫色瑜伽垫。", "label": "preference", "dimension": "VC", "system_prompt_name": 
{ "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 527, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/527/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/127/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/127/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/527/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/527/tie_2.png", "save_id": 127, "prompt_en": "Hang another identical red lantern next to it.", "prompt_cn": "在它旁边再挂一个一模一样的红灯笼。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 528, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/528/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/127/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/127/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/528/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/528/loser.png", "save_id": 127, "prompt_en": "Hang another identical red lantern next to it.", "prompt_cn": "在它旁边再挂一个一模一样的红灯笼。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 529, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/17.png", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/529/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/128/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/128/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/529/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/529/loser.png", "save_id": 128, "prompt_en": "Build another identical snowman next to the first one.", "prompt_cn": "在第一个雪人旁边再堆一个相同的雪人。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 530, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/18.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/530/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/129/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/129/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/530/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/530/loser.png", "save_id": 129, "prompt_en": "Add another identical surfboard upright in the sand nearby.", "prompt_cn": "在附近的沙滩上再添加一块相同的冲浪板竖直立在沙中。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 531, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Add_Copy/20.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/531/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/131/14.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/ADD/131/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/531/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/531/loser.png", "save_id": 131, "prompt_en": "Copy an identical game console next to the current one", "prompt_cn": "在现在游戏机的旁边复制一个相同的游戏机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 532, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/532/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/900/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/900/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/532/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/532/loser.png", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 533, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/5.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/533/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/901/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/901/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/533/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/533/loser.png", 
"save_id": 901, "prompt_en": "Make the boy look like he is talking on the phone.", "prompt_cn": "让男孩看起来好像正在打电话。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 534, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/534/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/904/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/904/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/534/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/534/loser.png", "save_id": 904, "prompt_en": "Have the dog place its paws on the computer and work hard.", "prompt_cn": "让这只狗把爪子放在电脑上,看起来在努力工作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 535, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/535/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/907/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/907/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/535/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/535/loser.png", "save_id": 907, "prompt_en": "Have the girl grip the lat pulldown machine with her hands.", "prompt_cn": "让这个女孩用手握住高位下拉的器械。", "label": "preference", "dimension": 
"VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 536, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/536/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/911/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/911/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/536/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/536/loser.png", "save_id": 911, "prompt_en": "Make the girl point both of her hands to the left.", "prompt_cn": "让女孩把双手都指向左侧。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 537, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/537/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/911/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/911/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/537/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/537/loser.png", "save_id": 911, "prompt_en": "Make the girl point both of her hands to the left.", "prompt_cn": "让女孩把双手都指向左侧。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 538, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/538/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/912/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/912/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/538/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/538/tie_2.png", "save_id": 912, "prompt_en": "Make the girl open her eyes and sit up.", "prompt_cn": "让这个女孩睁开眼睛并坐起来。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 539, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/19.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/539/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/915/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/915/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/539/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/539/loser.png", "save_id": 915, "prompt_en": "Have the girl pick up and hold the ball in front of her.", "prompt_cn": "让女孩抱起她面前的球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 540, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/19.png", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/540/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/915/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/915/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/540/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/540/loser.png", "save_id": 915, "prompt_en": "Have the girl pick up and hold the ball in front of her.", "prompt_cn": "让女孩抱起她面前的球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 541, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/541/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/917/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/917/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/541/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/541/loser.png", "save_id": 917, "prompt_en": "Have the girl raise her right hand.", "prompt_cn": "让这个女孩举起来右手。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 542, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/23.jpeg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/542/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/918/9.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/918/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/542/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/542/loser.png", "save_id": 918, "prompt_en": "Have the girl sit on the ground.", "prompt_cn": "让这个小女孩坐在地上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 543, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/29.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/543/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/924/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/924/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/543/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/543/loser.png", "save_id": 924, "prompt_en": "Adjust the pose of the man on the left so that he is holding the microphone right next to his mouth, as if he is speaking.", "prompt_cn": "调整左边那个男人的姿势,让他把话筒举到嘴边,好像正在说话。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 544, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/30.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/544/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/925/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/925/12.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/544/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/544/loser.png", "save_id": 925, "prompt_en": "Have the boy cross his legs.", "prompt_cn": "让这个男生跷起二郎腿。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 545, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/31.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/545/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/926/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/926/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/545/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/545/loser.png", "save_id": 926, "prompt_en": "Adjust the girl’s pose so that she is looking straight ahead.", "prompt_cn": "让这个女生向前看.", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 546, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/33.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/546/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/928/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/928/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/546/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/546/loser.png", "save_id": 928, "prompt_en": "Make the girl look up at the 
sky.", "prompt_cn": "让女孩抬头仰望天空。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 547, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/547/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/929/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/929/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/547/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/547/loser.png", "save_id": 929, "prompt_en": "Have the man wave goodbye to his friend.", "prompt_cn": "让这位男士挥手向朋友告别。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 548, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/38.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/548/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/933/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/933/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/548/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/548/tie_2.png", "save_id": 933, "prompt_en": "Make the girl in the act of bending down to pick up the toy.", "prompt_cn": "让这个女孩正在弯腰捡地上的玩具。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 549, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/41.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/549/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/936/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/936/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/549/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/549/loser.png", "save_id": 936, "prompt_en": "Make the motorcycle do a wheelie.", "prompt_cn": "让这辆摩托车做一个翘头动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 550, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/42.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/550/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/937/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/937/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/550/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/550/loser.png", "save_id": 937, "prompt_en": "Make the girl driving the bumper car touch her hair with her left hand.", "prompt_cn": "让这个驾驶碰碰车的女孩用左手摸自己的头发", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 551, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/46.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/551/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/941/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/941/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/551/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/551/tie_2.png", "save_id": 941, "prompt_en": "Have this swimmer touch their goggles.", "prompt_cn": "让这名游泳运动员摸自己的泳镜", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 552, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/47.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/552/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/942/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/942/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/552/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/552/loser.png", "save_id": 942, "prompt_en": "Have the athlete wearing the number 10 jersey sit on the ground.", "prompt_cn": "让这名穿着 10 号球衣的运动员坐在地上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 553, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/49.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/553/source.png", 
"winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/944/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/944/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/553/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/553/loser.png", "save_id": 944, "prompt_en": "Have the cat lie down on the lawn.", "prompt_cn": "让这只猫卧在草坪上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 554, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/50.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/554/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/945/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/945/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/554/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/554/loser.png", "save_id": 945, "prompt_en": "Make the waiter bow.", "prompt_cn": "让服务员鞠躬。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 555, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/52.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/555/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/947/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/947/0.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/555/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/555/loser.png", "save_id": 947, "prompt_en": "Make her point her finger towards the distance.", "prompt_cn": "让她用手指向远方。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 556, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/52.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/556/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/947/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/947/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/556/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/556/tie_2.png", "save_id": 947, "prompt_en": "Make her point her finger towards the distance.", "prompt_cn": "让她用手指向远方。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 557, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/53.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/557/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/948/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/948/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/557/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/557/loser.png", "save_id": 948, "prompt_en": "Make the boxer throw a straight 
punch.", "prompt_cn": "让这名拳击手打出一记直拳。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 558, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/58.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/558/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/953/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/953/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/558/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/558/loser.png", "save_id": 953, "prompt_en": "Have the boy give a thumbs-up with his right hand.", "prompt_cn": "让这个男孩用右手竖起大拇指。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 559, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/62.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/559/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/957/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/957/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/559/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/559/tie_2.png", "save_id": 957, "prompt_en": "Perform a kickflip on a skateboard.", "prompt_cn": "让这个人在滑板上做一个 kickflip 动作。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 560, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/65.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/560/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/960/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/960/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/560/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/560/loser.png", "save_id": 960, "prompt_en": "Have this boy kneeling on one knee.", "prompt_cn": "让这个男生单膝下跪", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 561, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/66.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/561/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/961/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/961/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/561/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/561/loser.png", "save_id": 961, "prompt_en": "Have this girl extend her arms.", "prompt_cn": "让这个女生伸开双臂", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 562, "sampling_model": "Qwen-Image-Edit-R1", 
"source_image_ori": "Part2/Action/67.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/562/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/962/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/962/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/562/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/562/tie_2.png", "save_id": 962, "prompt_en": "Have this girl make a finger heart gesture.", "prompt_cn": "让这个女生做出比心的动作", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 563, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/70.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/563/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/965/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Action/965/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/563/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/563/loser.png", "save_id": 965, "prompt_en": "Have this boy take a starting position for a 100-meter sprint.", "prompt_cn": "让这个男生做出百米起跑的动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 564, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/76.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/564/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/827/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/827/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/564/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/564/loser.png", "save_id": 827, "prompt_en": "Keep the globe and all dolls unchanged, and change the background to a night sky with a clear Milky Way and nebulae.", "prompt_cn": "保持地球仪和所有玩偶不变,将背景改为夜晚星空,有清晰的银河和星云。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 565, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/92.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/565/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/831/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/831/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/565/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/565/tie_2.png", "save_id": 831, "prompt_en": "Keep the child, kite, and bicycle unchanged, and change the background to snow‑covered mountains under a clear blue sky.", "prompt_cn": "保持小孩、风筝和自行车不变,将背景改为在晴朗蓝天下的雪山景色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 566, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/105.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/566/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/833/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/833/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/566/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/566/loser.png", "save_id": 833, "prompt_en": "Keep the fruits and bowl unchanged, and change the background to an indoor kitchen countertop with cabinets.", "prompt_cn": "保持水果和碗不变,将背景改为室内厨房料理台和橱柜。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 567, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/158.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/567/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/843/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/843/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/567/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/567/loser.png", "save_id": 843, "prompt_en": "Keep the lighthouse unchanged and change the background to a busy modern harbor with container ships and cranes.", "prompt_cn": "保持灯塔不变,将背景改为有集装箱船和起重机的繁忙现代港口。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 568, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/204.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/568/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/846/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/846/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/568/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/568/tie_2.png", "save_id": 846, "prompt_en": "Keep the girl unchanged and change the background to a desert with golden sand dunes covering the ground.", "prompt_cn": "保持女子不变,将背景改为沙漠,地面覆盖着金黄色的沙丘。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 569, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/210.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/569/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/847/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/847/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/569/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/569/loser.png", "save_id": 847, "prompt_en": "Keep the man and the car unchanged, and change the background to a desert highway with sand dunes and a clear blue sky.", "prompt_cn": "保持人物和汽车不变,将背景改为沙丘起伏、蓝天晴朗的沙漠公路。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 570, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/290.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/570/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/849/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/849/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/570/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/570/loser.png", "save_id": 849, "prompt_en": "Keep the shopping cart unchanged, and change the background to the interior of a supermarket.", "prompt_cn": "保持购物车不变,将背景更换为超市内部场景。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 571, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/381.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/571/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/853/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/853/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/571/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/571/loser.png", "save_id": 853, "prompt_en": "Change background to a sunny forest clearing filled with wildflowers.", "prompt_cn": "将背景改为有野花的阳光森林空地", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 572, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/385.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/572/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/856/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/856/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/572/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/572/tie_2.png", "save_id": 856, "prompt_en": "Change the dog’s background to a spring park with blooming cherry blossoms.", "prompt_cn": "将这只戴眼镜的狗移动到春季樱花盛开的公园中。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 573, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/387.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/573/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/858/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/858/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/573/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/573/loser.png", "save_id": 858, "prompt_en": "Keep the table and the flowers and items on it unchanged, and change the background to a Parisian street-side outdoor café.", "prompt_cn": "保持桌子和上面的花和物品不变,背景改为巴黎街边露天咖啡馆。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 574, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/398.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/574/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/864/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/864/9.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/574/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/574/loser.png", "save_id": 864, "prompt_en": "Move the astronaut to a golden beach at sunset with gentle ocean waves.", "prompt_cn": "将宇航员移到日落时分的金色沙滩上", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 575, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/402.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/575/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/868/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/868/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/575/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/575/tie_2.png", "save_id": 868, "prompt_en": "Keep the cyclist and the bicycle she is carrying unchanged, and change the sunny countryside road, stone wall, and café background to a winding coastal highway at sunset.", "prompt_cn": "保持骑车人和她装载的自行车不变,将阳光明媚的乡村道路、石墙和咖啡馆背景更改为日落时分蜿蜒的沿海公路。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 576, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/413.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/576/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/877/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/877/14.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/576/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/576/loser.png", "save_id": 877, "prompt_en": "Keep the lifebuoy as the main subject unchanged, and change the original background in the image to a brightly lit indoor swimming poolside.", "prompt_cn": "保持救生圈主体不变,将图片中原有的背景更改为一个光线明亮的室内游泳馆池边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 577, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/415.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/577/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/879/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/879/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/577/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/577/loser.png", "save_id": 879, "prompt_en": "Keep the sanitation worker wearing a fluorescent yellow jacket and the tool-filled cleaning cart he is pushing unchanged, and change the cobblestone street and brick building background in the image to a lively open-air market.", "prompt_cn": "保持穿着荧光黄夹克的环卫工人和他推着的装满工具的清洁车不变,将图片中鹅卵石街道和砖砌建筑的背景更改为一个热闹的露天市场。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 578, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/421.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/578/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/883/3.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/883/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/578/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/578/loser.png", "save_id": 883, "prompt_en": "Keep the person on the horse unchanged, and change the background to a golden beach at sunset.", "prompt_cn": "保持骑马的人不变,将背景改为夕阳下的金色海滩岸边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 579, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/425.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/579/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/885/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/885/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/579/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/579/loser.png", "save_id": 885, "prompt_en": "Keep SpongeBob unchanged, and change the indoor scene to a sunny beach with the sea.", "prompt_cn": "保持海绵宝宝不变,将室内场景改成阳光明媚的沙滩和大海。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 580, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/434.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/580/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/890/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/890/1.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/580/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/580/loser.png", "save_id": 890, "prompt_en": "Keep the dog unchanged, and change the background to a snowy forest with tall pine trees.", "prompt_cn": "保持小狗不变,将背景改为有高大松树的雪树林。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 581, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/436.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/581/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/891/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/891/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/581/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/581/tie_2.png", "save_id": 891, "prompt_en": "Keep the table and chairs unchanged, and change the background to a modern living room with floor-to-ceiling windows.", "prompt_cn": "保持桌椅不变,将背景改为带落地窗的现代家庭客厅。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 582, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/436.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/582/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/891/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/891/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/582/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/582/loser.png", "save_id": 891, "prompt_en": "Keep the table and chairs unchanged, and change the background to a modern living room with floor-to-ceiling windows.", "prompt_cn": "保持桌椅不变,将背景改为带落地窗的现代家庭客厅。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 583, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/462.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/583/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/895/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/895/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/583/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/583/tie_2.png", "save_id": 895, "prompt_en": "Keep the vehicle unchanged and replace the background with an auto exhibition center.", "prompt_cn": "保持车辆不变,将背景改为在车览中心。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 584, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/294.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/584/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/603/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/603/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/584/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/584/loser.png", "save_id": 603, "prompt_en": "Keep the six emoticon balls 
unchanged, and change the background to a white leather sofa.", "prompt_cn": "保持六个表情球不变,将背景改在白色的皮质沙发上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 585, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/294.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/585/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/603/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Background/603/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/585/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/585/tie_2.png", "save_id": 603, "prompt_en": "Keep the six emoticon balls unchanged, and change the background to a white leather sofa.", "prompt_cn": "保持六个表情球不变,将背景改在白色的皮质沙发上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 586, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/586/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/773/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/773/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/586/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/586/loser.png", "save_id": 773, "prompt_en": "Change the sofa material to red velvet.", "prompt_cn": "将沙发的材质更改为红色天鹅绒。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 587, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/587/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/781/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/781/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/587/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/587/loser.png", "save_id": 781, "prompt_en": "Change the carpet to woven velvet.", "prompt_cn": "把地毯变成编织丝绒材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 588, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/11.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/588/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/782/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/782/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/588/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/588/tie_2.png", "save_id": 782, "prompt_en": "Make the coffee table out of glass.", "prompt_cn": "将咖啡桌改为玻璃材质。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 589, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part2/Change_material/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/589/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/784/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/784/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/589/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/589/loser.png", "save_id": 784, "prompt_en": "Change the dining table to walnut wood.", "prompt_cn": "将餐桌改为胡桃木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 590, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/590/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/785/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/785/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/590/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/590/loser.png", "save_id": 785, "prompt_en": "Change the bedside table to pine wood.", "prompt_cn": "将床头柜改成松木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 591, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/591/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/786/2.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/786/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/591/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/591/loser.png", "save_id": 786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 592, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/592/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/789/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/789/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/592/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/592/loser.png", "save_id": 789, "prompt_en": "Replace the current bathroom vanity with one made of stone.", "prompt_cn": "将现有的洗漱台换成石质的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 593, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/593/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/800/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/800/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/593/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/593/loser.png", "save_id": 800, "prompt_en": "Change the hat to be made of straw.", "prompt_cn": "把帽子改成由草制成的。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 594, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/594/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/801/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/801/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/594/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/594/loser.png", "save_id": 801, "prompt_en": "Replace the strap with metal links.", "prompt_cn": "将手表的表带更换为金属链式表带。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 595, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/41.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/595/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/804/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/804/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/595/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/595/loser.png", "save_id": 804, "prompt_en": "Turn the tumbler into plastic.", "prompt_cn": "将该随行杯改为由塑料制成。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", 
"VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 596, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/45.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/596/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/807/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/807/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/596/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/596/loser.png", "save_id": 807, "prompt_en": "Change the teapot to porcelain.", "prompt_cn": "将茶壶更换为瓷质茶壶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 597, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/64.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/597/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/821/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/821/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/597/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/597/loser.png", "save_id": 821, "prompt_en": "Change the toy car to plastic.", "prompt_cn": "将玩具车改为塑料材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 598, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Change_material/66.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/598/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/823/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_Material/823/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/598/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/598/loser.png", "save_id": 823, "prompt_en": "Change this airplane to be made of metal.", "prompt_cn": "把这个飞机改为金属的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 599, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/122.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/599/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/553/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/553/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/599/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/599/loser.png", "save_id": 553, "prompt_en": "Resize the hot air balloon on the right to match the size of the one on the left.", "prompt_cn": "将右侧的热气球调整为与左侧热气球相同的大小。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 600, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/122.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/600/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/553/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/553/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/600/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/600/loser.png", "save_id": 553, "prompt_en": "Resize the hot air balloon on the right to match the size of the one on the left.", "prompt_cn": "将右侧的热气球调整为与左侧热气球相同的大小。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 601, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/137.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/601/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/556/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/556/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/601/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/601/loser.png", "save_id": 556, "prompt_en": "Make the stuffed bunny on the bed twice as large.", "prompt_cn": "将床上的毛绒兔子尺寸放大为原来的两倍。\n", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 602, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/142.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/602/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/558/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/558/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/602/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/602/loser.png", "save_id": 558, "prompt_en": "Change the color of the plant question-mark sculpture to bright yellow.", "prompt_cn": "将植物问号雕塑的颜色改为亮黄色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 603, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/155.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/603/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/559/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/559/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/603/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/603/loser.png", "save_id": 559, "prompt_en": "Change the armchair on the right to light green.", "prompt_cn": "将右边的扶手椅改为浅绿色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 604, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/155.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/604/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/559/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/559/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/604/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/604/loser.png", "save_id": 559, "prompt_en": "Change the armchair 
on the right to light green.", "prompt_cn": "将右边的扶手椅改为浅绿色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 605, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/159.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/605/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/560/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/560/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/605/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/605/loser.png", "save_id": 560, "prompt_en": "Change the sofa color from mustard yellow to deep emerald green.", "prompt_cn": "将沙发的颜色从芥末黄改为深翡翠绿。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 606, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/606/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/561/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/561/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/606/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/606/tie_2.png", "save_id": 561, "prompt_en": "Change the blue jumpsuit to pink.", "prompt_cn": "将蓝色连体衣变成粉色的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 607, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/607/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/563/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/563/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/607/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/607/loser.png", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 608, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/206.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/608/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/567/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/567/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/608/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/608/loser.png", "save_id": 567, "prompt_en": "Change the height of the chair next to the desk to 2/3 of its current height.", "prompt_cn": "将图中桌子旁的椅子高度改为当前的2/3。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 609, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/224.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/609/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/569/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/569/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/609/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/609/loser.png", "save_id": 569, "prompt_en": "Double the area of the sign on the wall.", "prompt_cn": "将墙上牌子的面积改为当前的两倍。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 610, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/224.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/610/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/569/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/569/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/610/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/610/loser.png", "save_id": 569, "prompt_en": "Double the area of the sign on the wall.", "prompt_cn": "将墙上牌子的面积改为当前的两倍。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 611, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/224.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/611/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/569/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/569/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/611/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/611/tie_2.png", "save_id": 569, "prompt_en": "Double the area of the sign on the wall.", "prompt_cn": "将墙上牌子的面积改为当前的两倍。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 612, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/251.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/612/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/574/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/574/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/612/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/612/loser.png", "save_id": 574, "prompt_en": "Change the toothbrush handle to a solid, vivid sapphire blue color.", "prompt_cn": "将牙刷刷柄改成纯正的宝石蓝色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 613, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/261.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/613/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/582/2.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/582/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/613/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/613/loser.png", "save_id": 582, "prompt_en": "Change the red popcorn pot to a solid pure blue color.", "prompt_cn": "将红色爆米花锅改成纯蓝色外观", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 614, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/269.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/614/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/588/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/588/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/614/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/614/loser.png", "save_id": 588, "prompt_en": "Change the toilet's color to a clean solid sky blue shade.", "prompt_cn": "将马桶的颜色改成纯正天蓝色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 615, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/270.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/615/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/589/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/589/5.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/615/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/615/loser.png", "save_id": 589, "prompt_en": "Change the bookshelf color from terracotta to matte black.", "prompt_cn": "把书架从砖红色改为哑光黑。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 616, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/271.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/616/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/590/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/590/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/616/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/616/loser.png", "save_id": 590, "prompt_en": "Change the sofa to a light blue color.", "prompt_cn": "让沙发变成淡蓝色的", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 617, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/275.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/617/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/592/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/592/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/617/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/617/loser.png", 
"save_id": 592, "prompt_en": "Resize the rocket toy to match the height of the adjacent backpack.", "prompt_cn": "让这个火箭玩具缩小到和旁边的书包一样高。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 618, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/279.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/618/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/595/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/595/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/618/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/618/loser.png", "save_id": 595, "prompt_en": "Enlarge the plant in the image to be as tall as the door handle.", "prompt_cn": "将图中的植物长大到和门把手一样高。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 619, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/285.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/619/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/597/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/597/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/619/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/619/loser.png", "save_id": 597, "prompt_en": "Enlarge the wicker picnic basket on the motorcycle’s rear seat to about twice its original size.", "prompt_cn": "把摩托车后座上的藤编野餐篮尺寸增大到大约原来的两倍。", "label": 
"preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 620, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/289.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/620/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/600/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/600/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/620/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/620/loser.png", "save_id": 600, "prompt_en": "Make the dress of the girl on the left longer so that it covers the area above the knees.", "prompt_cn": "让左边女孩的裙子变长,覆盖膝盖以上部分。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 621, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/289.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/621/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/601/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/601/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/621/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/621/loser.png", "save_id": 601, "prompt_en": "Change the helmet in the image to white.", "prompt_cn": "将图中的头盔变为白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 622, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/296.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/622/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/604/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/604/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/622/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/622/loser.png", "save_id": 604, "prompt_en": "Enlarge the orange on the left so that it matches the size of the lemon.", "prompt_cn": "放大左侧的橙子,使其与柠檬的大小相匹配。”", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 623, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/298.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/623/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/606/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/606/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/623/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/623/loser.png", "save_id": 606, "prompt_en": "Resize the apple to be the same size as the orange.", "prompt_cn": "将苹果缩小到和橘子一样大。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 624, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/300.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/624/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/607/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Change_color_size/607/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/624/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/624/loser.png", "save_id": 607, "prompt_en": "Resize the yellow trash can to half of its current height.", "prompt_cn": "将黄色的垃圾桶缩小到现在一半的高度。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 625, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/205.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/625/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/301/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/301/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/625/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/625/tie_2.png", "save_id": 301, "prompt_en": "Add the black text 'Machine Learning' in the center of the laptop screen, remove the circular ornament on the left, place an open heavy dictionary in front of the laptop, and remove the stone in the lower-left corner of the desk.", "prompt_cn": "在笔记本电脑屏幕中央添加一句黑色文字‘Machine Learning’,将左侧圆环形摆件移除,在笔记本电脑前面加入一本翻开的厚重词典,移除桌面左前方的石头。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 626, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/235.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/626/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/304/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/304/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/626/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/626/tie_2.png", "save_id": 304, "prompt_en": "Turn on the TV on the wall, showing a Tom and Jerry cartoon. Write 'Successful' on the wall above the TV. Remove the clock in the image, and change all the stools to blue.", "prompt_cn": "打开墙上的电视,显示猫和老鼠的动画片,在电视上方的墙上写上‘Successful’,移除图中的时钟,将所有凳子改为蓝色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 627, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/303.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/627/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/306/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/306/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/627/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/627/loser.png", "save_id": 306, "prompt_en": "Have the girl pick up the water bottle in the center and drink from it, change the pants to the same color as the top, and add a white towel around her neck.", "prompt_cn": "让这个女生拿起中间的水瓶喝水,将裤子改为与上衣相同的颜色,并在脖子上挂一条白色毛巾。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 628, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part1/image/309.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/628/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/311/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/311/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/628/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/628/tie_2.png", "save_id": 311, "prompt_en": "Add a park sign next to the path with the text 'Sunset Research Park', change the bench to white and add a person reading on it, and place a tent on the lawn.", "prompt_cn": "在小路旁添加一块公园指示牌,牌子上写‘Sunset Research Park’,将长椅改为白色并在上面添加一位读书的人,在草坪上加入一个帐篷。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 629, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/329.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/629/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/323/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/323/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/629/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/629/tie_2.png", "save_id": 323, "prompt_en": "Add faint text saying 'Wildlife Research 2024' in the sky, place a standing deer observing in the foreground, and add a white backpack under the tree.", "prompt_cn": "在天空中加入淡淡的文字‘Wildlife Research 2024’,在前景添加一只站立观望的鹿,并在树下添加一只白色书包。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 630, "sampling_model": 
"Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/334.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/630/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/325/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/325/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/630/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/630/loser.png", "save_id": 325, "prompt_en": "Add a watch to the boy's right wrist, remove the book on the table and the spoon in the plate, and refill the coffee cup with coffee.", "prompt_cn": "将男孩的右手腕上戴上手表,移除桌子上的书和盘子中的勺子,将咖啡杯里的咖啡续满。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 631, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/341.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/631/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/328/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/328/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/631/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/631/tie_2.png", "save_id": 328, "prompt_en": "Change the groom's suit color to white. 
Hang a large sign on the left side of the image with the text \"I love you\" in a neon light style.", "prompt_cn": "将新郎的西装颜色改为白色。在图像左侧悬挂一块尺寸很大的招牌,招牌上写着 “I love you”,字体采用霓虹灯风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 632, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/342.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/632/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/329/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/329/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/632/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/632/loser.png", "save_id": 329, "prompt_en": "Change the “Massage” text on the black sachet to “Relax”, recolor the bag to deep purple, and add a small lit candle beside it.", "prompt_cn": "将黑色香薰袋上的“Massage”文字改为“Relax”,把袋子颜色改成深紫色,并在袋子旁边添加一支点燃的小蜡烛", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 633, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/345.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/633/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/331/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/331/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/633/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/633/tie_2.png", "save_id": 331, "prompt_en": "Change the girl's T-shirt to yellow and have her display a very 
confused or puzzled expression and pose.", "prompt_cn": "将女孩的T恤改为黄色,并让她表现出非常困惑或不解的表情和动作。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 634, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/349.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/634/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/335/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/335/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/634/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/634/loser.png", "save_id": 335, "prompt_en": "Change her top to white, add a small sailboat on the sea in the background, and have the girl wear the hat on her head.", "prompt_cn": "将她的上衣改为白色,在背景的海面上添加一艘小帆船,并让女生将帽子戴在头上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 635, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/360.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/635/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/342/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/342/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/635/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/635/loser.png", "save_id": 342, "prompt_en": "Place a Barbie doll toy on the chair closer to the camera, and add a Patrick Star toy on the chair farther from the camera.", "prompt_cn": 
"在离镜头更近的椅子上添加一个芭比娃娃的玩具,离镜头更远的椅子上加入一个派大星的玩具。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 636, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/365.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/636/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/346/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/346/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/636/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/636/tie_2.png", "save_id": 346, "prompt_en": "Make the girl’s expression happy and change her top to pink.", "prompt_cn": "让这个女孩的表情变得开心,并将她的上衣改为粉色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 637, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/365.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/637/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/347/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/347/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/637/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/637/loser.png", "save_id": 347, "prompt_en": "Have the girl place both feet on the ground, display a helpless gesture and expression, and change her top to yellow.", "prompt_cn": "让这个女孩双脚放在地面上,做出无奈的动作和表情,并将上衣改为黄色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", 
"VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 638, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/367.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/638/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/349/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/349/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/638/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/638/loser.png", "save_id": 349, "prompt_en": "Change the spoon to silver, replace the coffee in the cup with tea, and remove the small gold-wrapped chocolates.", "prompt_cn": "将勺子改为银色,将杯子里的咖啡替换为茶水,并移除小金色包装的巧克力。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 639, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/368.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/639/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/350/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/350/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/639/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/639/loser.png", "save_id": 350, "prompt_en": "Add a red bus on the road, change the number on the speed limit sign to '20', and set the time to a sunny noon.", "prompt_cn": "在道路上添加一辆红色巴士,将限速牌上的数字改为‘20’,并将时间调整为阳光明媚的中午。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 640, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/369.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/640/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/351/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/351/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/640/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/640/tie_2.png", "save_id": 351, "prompt_en": "Remove the scarf from the girl and have her sit on the sofa, replace the green plant in the image with a guitar, and add a blue robot vacuum on the floor.", "prompt_cn": "移除这个女孩的围脖,并让她坐在沙发上,将图像中的绿植替换为吉他,并在地上添加一个蓝色的扫地机器人。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 641, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/370.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/641/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/352/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/352/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/641/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/641/loser.png", "save_id": 352, "prompt_en": "Add a table in the center of the image, arrange four chairs around the table, place four cups of coffee on the table, and put a sign on the window that reads 'Welcome'.", "prompt_cn": "在图片中央添加一张桌子,围绕桌子摆放四把椅子,桌子上放置四杯咖啡,在窗户上贴一个指示牌,牌子上写‘Welcome’。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", 
"VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 642, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/373.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/642/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/355/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/355/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/642/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/642/tie_2.png", "save_id": 355, "prompt_en": "Remove the TV on the wall, add a chandelier near the ceiling light, change the wooden desk to glass, and place a laptop on the desk.", "prompt_cn": "移除墙上的电视,在天花板灯光旁添加一个吊灯,将木质办公桌改为玻璃材质,并在办公桌上添加一台笔记本电脑。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 643, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/374.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/643/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/356/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/356/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/643/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/643/loser.png", "save_id": 356, "prompt_en": "Replace the microphone in the girl's hand with an ice cream, change the suit color to emerald green, and add a potted plant next to the large speaker.", "prompt_cn": "将女生手中的麦克风换成冰淇淋,将西装颜色改为翠绿色,并在大音箱旁添加一盆盆栽。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 644, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/441.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/644/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/364/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/364/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/644/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/644/tie_2.png", "save_id": 364, "prompt_en": "Remove the towel in the bottom left corner and add an electric toothbrush in the empty space on the right, turned on.", "prompt_cn": "删除左下角的毛巾,并在右侧空白处添加一支打开的电动牙刷。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 645, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/444.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/645/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/366/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/366/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/645/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/645/loser.png", "save_id": 366, "prompt_en": "Change the headphone case in the image to black and the headphones to sky blue.", "prompt_cn": "将图像中的耳机盒变为黑色,耳机变为天蓝色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 646, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/445.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/646/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/367/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/367/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/646/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/646/loser.png", "save_id": 367, "prompt_en": "Swap the colors of the two doors and add a flower-filled hanging planter on the outer wall of the window on the left side.", "prompt_cn": "交换左右两扇门的颜色并且在左侧窗户外墙上添加一个装满鲜花的挂式花箱。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 647, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/447.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/647/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/369/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/369/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/647/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/647/loser.png", "save_id": 369, "prompt_en": "Change the sink color to blue and replace the mirror with a round mirror.", "prompt_cn": "将洗手台颜色改为蓝色,并将镜子替换为圆形镜子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 648, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/449.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/648/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/372/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/372/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/648/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/648/loser.png", "save_id": 372, "prompt_en": "Move the silver mirror to the wall on the left side of the door, add a whiteboard above the door with the artistic text 'HOME SWEET HOME', change the door color to a bright red, and add a pink suitcase to the right of the door.", "prompt_cn": "将银色镜子移到门的左侧墙面,在门的上方添加一块白板,并在白板上写上艺术字体的‘HOME SWEET HOME’,同时将门的颜色改为鲜艳的红色,并在门的右侧添加一个粉色的旅行箱。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 649, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/451.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/649/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/373/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/373/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/649/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/649/loser.png", "save_id": 373, "prompt_en": "Move the chair to the front of the stairs and turn it blue.", "prompt_cn": "把椅子移到楼梯前面,并把它变成蓝色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 650, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/451.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/650/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/374/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/374/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/650/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/650/tie_2.png", "save_id": 374, "prompt_en": "Have a person sit on the chair reading a book, replace the vase with a table lamp, and add a kitten on the stairs.", "prompt_cn": "让一个人坐在椅子上看书,把花瓶换成台灯,并在楼梯上加入一只小猫。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 651, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/452.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/651/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/376/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/376/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/651/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/651/loser.png", "save_id": 376, "prompt_en": "Turn on the air conditioner, add a television below it, and remove the table lamp from the scene.", "prompt_cn": "打开图中的空调,在空调下方添加一台电视,并移除画面中的台灯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 652, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/454.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/652/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/377/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/377/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/652/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/652/loser.png", "save_id": 377, "prompt_en": "Remove the umbrella and write the text \"Winter is Coming\" in the sky.", "prompt_cn": "移除那把雨伞,并在天空中写上文字“Winter is Coming”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 653, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/461.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/653/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/381/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/381/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/653/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/653/loser.png", "save_id": 381, "prompt_en": "Change the green paint on the wall to pink, add the text 'Love Home', and replace the girl's pants with shorts.", "prompt_cn": "将墙上的绿色油漆变成粉色,并写上文字‘Love Home’,将女生的裤子换成短裤。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 654, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/465.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/654/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/383/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/383/1.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/654/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/654/tie_2.png", "save_id": 383, "prompt_en": "Change the number displayed on the phone to '2026.5201314', recolor the pink paper to sky blue, remove the percentage symbol, and add an eraser to the left of the phone.", "prompt_cn": "将手机屏幕上的数字修改为‘2026.5201314’,将粉色纸张改为天蓝色,移除百分号,并在手机左侧添加一个橡皮擦。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 655, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/472.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/655/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/387/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/387/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/655/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/655/loser.png", "save_id": 387, "prompt_en": "Change the white self-balancing scooter to red and move it to the right side of the image.", "prompt_cn": "将那辆白色的小平衡车变为红色并且移动到图像的右边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 656, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/474.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/656/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/388/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/388/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/656/tie_1.png", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/656/tie_2.png", "save_id": 388, "prompt_en": "Place a sign reading 'Dream House' in the center of the lawn and add a cardboard box to the bottom-right corner.", "prompt_cn": "在草坪中间竖立一块写着‘Dream House’的牌子,并在右下角添加一个纸箱。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 657, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/481.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/657/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/394/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/394/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/657/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/657/loser.png", "save_id": 394, "prompt_en": "Remove the lemons outside the bowl, change the man's shirt color to blue, and have the girl pick up a lemon from the bowl and hand it to the man.", "prompt_cn": "移除碗外的柠檬,将男人的衬衫颜色改为蓝色,并让女孩从碗中拿起一个柠檬递给男人。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 658, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/494.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/658/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/403/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/403/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/658/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/658/loser.png", "save_id": 403, "prompt_en": "Remove the laptop from the desk and add a marble coffee table in the center of the image.", "prompt_cn": "移除书桌上的笔记本电脑,并在图像中央添加一个大理石茶几。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 659, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/496.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/659/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/404/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/404/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/659/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/659/loser.png", "save_id": 404, "prompt_en": "Remove the suitcase and make the woman sit on the rug reading a book.", "prompt_cn": "移除行李箱,让女性坐在地毯上读书。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 660, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/9.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/660/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/410/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/410/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/660/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/660/loser.png", "save_id": 410, "prompt_en": "Have the girl pick up the two dumbbells near her feet and perform a bench press exercise. 
Change her top to a short-sleeved shirt and her shoes to blue.", "prompt_cn": "让女孩捡起脚边的两个哑铃并进行卧推动作,将上衣改为短袖,并把鞋子改为蓝色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 661, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/661/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/414/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/414/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/661/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/661/loser.png", "save_id": 414, "prompt_en": "Have the cat raise its head and look straight ahead. Swap the two bowls on the table, place a power strip in front of the bowls, and remove the bowl on the left after the swap.", "prompt_cn": "让这只猫抬起头并直视前方,交换桌子上的两个碗,在碗的前方放置一个插线板,交换完成后移除左侧的碗。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 662, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Action/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/662/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/417/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/417/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/662/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/662/tie_2.png", "save_id": 417, "prompt_en": "Remove the table lamp from the image, shorten the coffee straw to 
two-thirds of its current length, and have the girl drinking the coffee.", "prompt_cn": "移除图中的台灯,将咖啡吸管的长度缩短为当前的 2/3,并让女孩正在喝咖啡。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 663, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/663/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/424/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Complex/424/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/663/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/663/loser.png", "save_id": 424, "prompt_en": "Change the chair to green, move it to the right side of the sofa, and add a fruit box on the table.", "prompt_cn": "将椅子改为绿色,并移动到沙发右侧,同时在桌子上新增一个水果盒。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 664, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/664/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1179/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1179/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/664/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/664/tie_2.png", "save_id": 1179, "prompt_en": "Replace the smiling balloon with a frowning one.", "prompt_cn": "将微笑的气球替换为一个皱眉的气球。", "label": "tie", "dimension": "IF", "system_prompt_name": { 
"IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 665, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/665/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1180/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1180/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/665/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/665/tie_2.png", "save_id": 1180, "prompt_en": "Make the cat look very fierce.”", "prompt_cn": "让这只猫看起来非常凶猛。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 666, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/666/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1183/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1183/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/666/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/666/tie_2.png", "save_id": 1183, "prompt_en": "Make the baby laughing happily.", "prompt_cn": "让婴儿开心地笑起来。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 667, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/667/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1187/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1187/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/667/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/667/loser.png", "save_id": 1187, "prompt_en": "Make him look relieved and happy.", "prompt_cn": "让他看起来放松、如释重负并且开心。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 668, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/668/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1192/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1192/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/668/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/668/loser.png", "save_id": 1192, "prompt_en": "Make the superhero look defeated and sad.", "prompt_cn": "让这位超级英雄看起来被打败并且很悲伤。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 669, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part2/emotion_change/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/669/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1194/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1194/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/669/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/669/tie_2.png", "save_id": 1194, "prompt_en": "Make the teacher look furious and shouting.", "prompt_cn": "让这位老师看起来极度愤怒并在大喊。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 670, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/670/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1198/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1198/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/670/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/670/tie_2.png", "save_id": 1198, "prompt_en": "Make the person sitting in the car look like they are having road rage.", "prompt_cn": "让车里坐的人看起来像是路怒症发作了。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 671, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/62.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/671/source.png", 
"winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1220/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1220/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/671/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/671/loser.png", "save_id": 1220, "prompt_en": "Make him look utterly confused, scratching his head.", "prompt_cn": "让他看起来极度困惑,并且正在挠头。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 672, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/emotion_change/66.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/672/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1223/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Emotion_Change/1223/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/672/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/672/tie_2.png", "save_id": 1223, "prompt_en": "Make this little boy look lost in thought.", "prompt_cn": "让这个小男孩正在发呆。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 673, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/39.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/673/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/695/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/695/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/673/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/673/loser.png", "save_id": 695, "prompt_en": "Extract the boy riding the bicycle and his bicycle, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中骑自行车的男孩及其自行车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 674, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/47.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/674/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/696/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/696/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/674/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/674/loser.png", "save_id": 696, "prompt_en": "Extract the hanging clock on the side of the building, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取建筑物侧面的挂钟,保持其位置、朝向和姿态不变,并将背景替换为纯白色。 ", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 675, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/53.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/675/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/697/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/697/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/675/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/675/loser.png", "save_id": 697, "prompt_en": "Extract the hat from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图片中的帽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 676, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/676/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/702/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/702/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/676/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/676/loser.png", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 677, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/88.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/677/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/703/10.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/703/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/677/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/677/loser.png", "save_id": 703, "prompt_en": "Extract the black 8-ball, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出那颗黑色的8号台球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 678, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/89.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/678/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/704/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/704/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/678/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/678/loser.png", "save_id": 704, "prompt_en": "Extract the white mug on the right, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出右侧的这个白色马克杯,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 679, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/89.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/679/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/704/6.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/704/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/679/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/679/loser.png", "save_id": 704, "prompt_en": "Extract the white mug on the right, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出右侧的这个白色马克杯,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 680, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/96.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/680/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/706/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/706/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/680/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/680/loser.png", "save_id": 706, "prompt_en": "Extract the TV from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的电视,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 681, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/103.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/681/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/709/2.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/709/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/681/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/681/loser.png", "save_id": 709, "prompt_en": "Extract the anime girl from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的动漫女孩,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 682, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/107.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/682/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/710/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/710/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/682/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/682/loser.png", "save_id": 710, "prompt_en": "Extract the child and the small dog they are walking, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的小孩以及牵着的小狗,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 683, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/120.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/683/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/713/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/713/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/683/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/683/loser.png", "save_id": 713, "prompt_en": "Extract the sofa from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的沙发,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 684, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/131.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/684/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/715/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/715/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/684/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/684/loser.png", "save_id": 715, "prompt_en": "Extract the white cushion under the table, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取桌子下方的白色靠垫,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 685, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/131.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/685/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/715/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/715/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/685/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/685/loser.png", "save_id": 715, "prompt_en": "Extract the white cushion under the table, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取桌子下方的白色靠垫,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 686, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/153.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/686/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/717/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/717/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/686/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/686/loser.png", "save_id": 717, "prompt_en": "Extract the microphone from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的麦克风,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 687, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/167.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/687/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/721/6.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/721/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/687/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/687/loser.png", "save_id": 721, "prompt_en": "Extract the two people holding hands from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中手拉手的两个人,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 688, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/688/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/724/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/724/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/688/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/688/loser.png", "save_id": 724, "prompt_en": "Extract the blue jumpsuit from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的蓝色连体衣,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 689, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/172.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/689/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/727/8.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/727/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/689/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/689/loser.png", "save_id": 727, "prompt_en": "Extract the lunar rover from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的月球车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 690, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/190.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/690/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/734/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/734/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/690/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/690/loser.png", "save_id": 734, "prompt_en": "Extract the paraglider and the pilot from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的滑翔伞和飞行员,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 691, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/314.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/691/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/736/3.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/736/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/691/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/691/loser.png", "save_id": 736, "prompt_en": "Extract the doctor rubber duck on the left side of the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中左侧的医生小黄鸭,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 692, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/337.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/692/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/744/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/744/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/692/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/692/loser.png", "save_id": 744, "prompt_en": "Extract the flowers and the basket from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的花和篮子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 693, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/355.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/693/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/748/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/748/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/693/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/693/loser.png", "save_id": 748, "prompt_en": "Extract the butterfly from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的蝴蝶,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 694, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/357.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/694/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/749/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/749/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/694/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/694/loser.png", "save_id": 749, "prompt_en": "Extract the teddy bear from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的泰迪熊,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 695, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/357.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/695/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/749/3.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/749/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/695/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/695/loser.png", "save_id": 749, "prompt_en": "Extract the teddy bear from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的泰迪熊,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 696, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/409.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/696/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/751/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/751/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/696/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/696/loser.png", "save_id": 751, "prompt_en": "Extract the Snoopy figurine wearing a graduation cap and the “Class of 2026” sign from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中戴学士帽的史努比公仔及“Class of 2026”牌子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 697, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/409.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/697/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/751/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/751/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/697/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/697/loser.png", "save_id": 751, "prompt_en": "Extract the Snoopy figurine wearing a graduation cap and the “Class of 2026” sign from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中戴学士帽的史努比公仔及“Class of 2026”牌子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 698, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/411.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/698/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/752/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/752/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/698/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/698/loser.png", "save_id": 752, "prompt_en": "Extract the sign from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的牌子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 699, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/447.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/699/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/756/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/756/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/699/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/699/loser.png", "save_id": 756, "prompt_en": "Extract the mirror from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的镜子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 700, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/466.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/700/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/761/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/761/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/700/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/700/loser.png", "save_id": 761, "prompt_en": "Extract the dark blue notebook on the right side of the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像右侧的深蓝色笔记本,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 701, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/472.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/701/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/764/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/764/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/701/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/701/loser.png", "save_id": 764, "prompt_en": "Extract the white balance bike on the left side of the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像左侧的白色平衡车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 702, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/472.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/702/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/764/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Extract/764/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/702/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/702/loser.png", "save_id": 764, "prompt_en": "Extract the white balance bike on the left side of the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像左侧的白色平衡车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 703, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/2.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/703/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/968/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/968/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/703/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/703/loser.png", "save_id": 968, "prompt_en": "Move the single cherry tomato onto the spoon.”", "prompt_cn": "将那颗单独的樱桃番茄移到勺子上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 704, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/704/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/971/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/971/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/704/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/704/tie_2.png", "save_id": 971, "prompt_en": "Move the robot to the left.", "prompt_cn": "将机器人移动到左侧。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 705, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/705/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/973/8.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/973/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/705/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/705/loser.png", "save_id": 973, "prompt_en": "Move the coffee cup to the bottom-right corner of the image.", "prompt_cn": "将咖啡杯移动到右下角。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 706, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/706/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/973/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/973/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/706/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/706/loser.png", "save_id": 973, "prompt_en": "Move the coffee cup to the bottom-right corner of the image.", "prompt_cn": "将咖啡杯移动到右下角。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 707, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/707/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/980/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/980/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/707/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/707/loser.png", "save_id": 980, "prompt_en": "Move the painting to the right.", "prompt_cn": "将这幅画向右移动。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 708, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/708/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/982/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/982/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/708/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/708/loser.png", "save_id": 982, "prompt_en": "Move the stack of books onto the nearby bookshelf.", "prompt_cn": "将那叠书移动到附近的书架上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 709, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/709/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/983/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/983/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/709/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/709/loser.png", "save_id": 983, "prompt_en": "Move the wall clock to the left of the television.", "prompt_cn": "将墙上的时钟移动到电视的左侧。", "label": "preference", 
"dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 710, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/710/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/988/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/988/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/710/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/710/tie_2.png", "save_id": 988, "prompt_en": "Move the spoon to the napkin beside the bowl.", "prompt_cn": "将勺子移到碗旁边的餐巾纸上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 711, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/26.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/711/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/989/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/989/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/711/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/711/loser.png", "save_id": 989, "prompt_en": "Move the candle to the front-right corner of the table.", "prompt_cn": "将蜡烛移动到桌子右前方的角落。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", 
"VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 712, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/29.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/712/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/992/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/992/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/712/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/712/tie_2.png", "save_id": 992, "prompt_en": "Move the strawberry onto the fork next to the cake.", "prompt_cn": "将草莓移到蛋糕旁边的叉子上。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 713, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/31.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/713/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/994/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/994/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/713/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/713/tie_2.png", "save_id": 994, "prompt_en": "Move the flower vase to the edge of the table.", "prompt_cn": "将花瓶移到桌子边缘。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 714, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/33.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/714/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/995/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/995/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/714/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/714/tie_2.png", "save_id": 995, "prompt_en": "Move the red car to the right side of the street.", "prompt_cn": "将红色汽车移动到街道的右侧。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 715, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/36.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/715/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/996/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/996/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/715/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/715/loser.png", "save_id": 996, "prompt_en": "Move the bicycle to lean against the lamp post.", "prompt_cn": "让自行车靠在路灯杆上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 716, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/716/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/997/13.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/997/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/716/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/716/loser.png", "save_id": 997, "prompt_en": "Move the pigeon to sit on the backrest of the bench.", "prompt_cn": "将那只鸽子移动到长椅的靠背上,让它坐在上面。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 717, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/40.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/717/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/999/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/999/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/717/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/717/loser.png", "save_id": 999, "prompt_en": "Move the trash can to be under the street light.", "prompt_cn": "将垃圾桶移动到直接位于路灯下方的位置。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 718, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/45.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/718/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1002/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1002/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/718/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/718/tie_2.png", "save_id": 1002, "prompt_en": "Move the tent to the right side.", "prompt_cn": "将帐篷移动到右侧。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 719, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/50.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/719/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1003/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1003/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/719/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/719/loser.png", "save_id": 1003, "prompt_en": "Move the boat to the center of the lake.", "prompt_cn": "将船移动到湖中央。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 720, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/62.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/720/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1012/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1012/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/720/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/720/loser.png", "save_id": 1012, "prompt_en": "Move the backpack onto the grass beside the bench.", "prompt_cn": "将背包移动到长椅旁边的草地上。", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 721, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/71.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/721/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1019/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1019/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/721/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/721/loser.png", "save_id": 1019, "prompt_en": "Move the printer onto the adjacent desk.", "prompt_cn": "将打印机移动到旁边的桌子上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 722, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/71.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/722/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1019/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1019/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/722/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/722/loser.png", "save_id": 1019, "prompt_en": "Move the printer onto the adjacent desk.", "prompt_cn": "将打印机移动到旁边的桌子上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": 
"IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 723, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/73.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/723/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1020/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1020/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/723/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/723/tie_2.png", "save_id": 1020, "prompt_en": "Move the hat from her head to her hand.", "prompt_cn": "将帽子从她的头上移到她的手中。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 724, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/81.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/724/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1025/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1025/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/724/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/724/loser.png", "save_id": 1025, "prompt_en": "Move the ring to the ring finger.", "prompt_cn": "将戒指移到无名指上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 725, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/85.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/725/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1026/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1026/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/725/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/725/loser.png", "save_id": 1026, "prompt_en": "Move the potion bottle onto the table next to the open book.", "prompt_cn": "将药水瓶移到桌子上,放在打开的书旁边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 726, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/92.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/726/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1027/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1027/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/726/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/726/tie_2.png", "save_id": 1027, "prompt_en": "Move the fairy to fly in the air.", "prompt_cn": "将小仙子移动到空中,使她在空中飞行。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 727, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/107.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/727/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1033/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1033/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/727/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/727/loser.png", "save_id": 1033, "prompt_en": "Move the garden bench to be under the large tree on the lawn.", "prompt_cn": "将花园长椅移动,使其摆放在草坪上那棵大树的下面。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 728, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/107.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/728/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1033/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1033/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/728/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/728/loser.png", "save_id": 1033, "prompt_en": "Move the garden bench to be under the large tree on the lawn.", "prompt_cn": "将花园长椅移动,使其摆放在草坪上那棵大树的下面。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 729, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/109.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/729/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1035/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1035/1.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/729/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/729/tie_2.png", "save_id": 1035, "prompt_en": "Move the flower pot to sit on the porch steps.", "prompt_cn": "将花盆移动到门廊台阶上摆放。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 730, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/110.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/730/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1036/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/1036/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/730/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/730/loser.png", "save_id": 1036, "prompt_en": "Move the air conditioner unit onto the ground.", "prompt_cn": "将空调机移动在地上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 731, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Move/111.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/731/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/3391/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Move/3391/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/731/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/731/tie_2.png", "save_id": 3391, "prompt_en": "Place the teapot lid inside the 
teacup.", "prompt_cn": "将茶壶的壶盖移动到茶碗里面。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 732, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/732/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1107/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1107/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/732/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/732/loser.png", "save_id": 1107, "prompt_en": "Have the girl pick up the two dumbbells at her feet and perform a chest press.", "prompt_cn": "让女孩捡起脚边的两个哑铃并进行卧推动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 733, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/733/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1114/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1114/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/733/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/733/loser.png", "save_id": 1114, "prompt_en": "Make the two boys bump fists.", "prompt_cn": "让两个男生正在进行碰拳的动作。", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 734, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/734/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1117/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1117/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/734/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/734/tie_2.png", "save_id": 1117, "prompt_en": "Have the older brother lift up his younger sister.", "prompt_cn": "让哥哥把妹妹抱起来。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 735, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/735/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1122/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1122/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/735/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/735/loser.png", "save_id": 1122, "prompt_en": "Make the athlete perform a kettlebell swing.", "prompt_cn": "让这位运动员进行壶铃摆动动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 736, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/736/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1123/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1123/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/736/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/736/tie_2.png", "save_id": 1123, "prompt_en": "Make the boy kick the soccer ball.", "prompt_cn": "让这个男孩看起来正在踢足球。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 737, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/737/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1125/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1125/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/737/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/737/loser.png", "save_id": 1125, "prompt_en": "Make the player holding the volleyball appear to be serving the ball.", "prompt_cn": "让拿着排球的这名球员看起来正在发排球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 738, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/24.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/738/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1127/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1127/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/738/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/738/loser.png", "save_id": 1127, "prompt_en": "Make the painter paint the wall with the roller.", "prompt_cn": "让油漆工使用滚筒给墙面刷漆。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 739, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/739/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1128/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1128/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/739/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/739/tie_2.png", "save_id": 1128, "prompt_en": "Make the chef chop the carrot.", "prompt_cn": "让这位厨师看起来正在切胡萝卜。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 740, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part2/Obj_interaction/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/740/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1130/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1130/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/740/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/740/loser.png", "save_id": 1130, "prompt_en": "Make the boy brush his teeth.", "prompt_cn": "让这个男孩刷牙。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 741, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/741/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1132/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1132/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/741/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/741/tie_2.png", "save_id": 1132, "prompt_en": "Make the runner drink from the water bottle.", "prompt_cn": "让这位跑步者看起来正在从水瓶中喝水。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 742, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/32.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/742/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1134/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1134/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/742/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/742/tie_2.png", "save_id": 1134, "prompt_en": "Make the man pick up and eat the burger.", "prompt_cn": "让这个男人拿起汉堡并吃下去。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 743, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/743/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1136/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1136/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/743/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/743/loser.png", "save_id": 1136, "prompt_en": "Make the boy eat the noodles held by the chopsticks.", "prompt_cn": "让男孩吃筷子上的面条。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 744, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/36.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/744/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1137/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1137/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/744/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/744/loser.png", "save_id": 1137, "prompt_en": "Make the child lick the ice cream.", "prompt_cn": "让这个孩子看起来正在舔冰淇淋。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 745, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/39.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/745/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1140/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1140/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/745/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/745/loser.png", "save_id": 1140, "prompt_en": "Make the tourist ride the camel.", "prompt_cn": "让游客骑在骆驼上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 746, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/44.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/746/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1144/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1144/12.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/746/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/746/loser.png", "save_id": 1144, "prompt_en": "Make the student kick the sandbag.", "prompt_cn": "让这名学员踢沙袋。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 747, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/50.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/747/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1148/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1148/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/747/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/747/tie_2.png", "save_id": 1148, "prompt_en": "Make the programmer type on the laptop keyboard.", "prompt_cn": "让这位程序员看起来正在笔记本电脑键盘上打字。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 748, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/55.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/748/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1152/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1152/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/748/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/748/tie_2.png", "save_id": 1152, "prompt_en": "Make the woman wear the hat.", "prompt_cn": "让这位女性把帽子戴在头上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 749, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/70.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/749/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1157/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1157/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/749/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/749/loser.png", "save_id": 1157, "prompt_en": "Make the dog hold the stick in its mouth.", "prompt_cn": "让这只狗把木棍叼在嘴里。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 750, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/72.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/750/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1158/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1158/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/750/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/750/loser.png", "save_id": 1158, "prompt_en": "Make the girl hug the teddy bear tightly.", 
"prompt_cn": "让女孩紧紧地抱着泰迪熊。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 751, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/73.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/751/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1159/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1159/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/751/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/751/tie_2.png", "save_id": 1159, "prompt_en": "Have the astronaut walk over and grab the flag stuck in the ground.", "prompt_cn": "让宇航员走过去握住插在地上的那面旗子。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 752, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/77.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/752/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1163/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1163/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/752/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/752/tie_2.png", "save_id": 1163, "prompt_en": "Make the robot hold the box tightly using its mechanical arm.", "prompt_cn": "让机器人用它的机械手臂抱住箱子。", "label": "tie", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 753, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/77.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/753/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1163/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1163/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/753/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/753/loser.png", "save_id": 1163, "prompt_en": "Make the robot hold the box tightly using its mechanical arm.", "prompt_cn": "让机器人用它的机械手臂抱住箱子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 754, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/78.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/754/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1164/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1164/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/754/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/754/loser.png", "save_id": 1164, "prompt_en": "Make the crow pick up the coin in its beak.", "prompt_cn": "让乌鸦用喙叼起那枚硬币。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 755, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/79.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/755/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1165/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1165/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/755/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/755/tie_2.png", "save_id": 1165, "prompt_en": "Make the weightlifter lift the barbell overhead.", "prompt_cn": "让举重运动员把杠铃举到头顶上方。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 756, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/80.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/756/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1166/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1166/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/756/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/756/tie_2.png", "save_id": 1166, "prompt_en": "Make the person bend down and pick up the hat.", "prompt_cn": "让这个人弯下腰并捡起帽子。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 757, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/81.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/757/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1167/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1167/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/757/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/757/loser.png", "save_id": 1167, "prompt_en": "Make the cat push the ball of yarn with its paw.", "prompt_cn": "让猫用爪子推毛线球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 758, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/86.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/758/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1170/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1170/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/758/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/758/tie_2.png", "save_id": 1170, "prompt_en": "Make the teenager ride the skateboard.", "prompt_cn": "让这个青少年正在踩着滑板滑行。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 759, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part2/Obj_interaction/91.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/759/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1171/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1171/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/759/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/759/tie_2.png", "save_id": 1171, "prompt_en": "Make the barber reach out and touch the customer’s hair.", "prompt_cn": "让理发师去摸顾客的头发。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 760, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/91.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/760/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1171/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1171/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/760/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/760/loser.png", "save_id": 1171, "prompt_en": "Make the barber reach out and touch the customer’s hair.", "prompt_cn": "让理发师去摸顾客的头发。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 761, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/92.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/761/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1172/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1172/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/761/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/761/tie_2.png", "save_id": 1172, "prompt_en": "Make the red and blue bumper cars collide with each other.", "prompt_cn": "让红色和蓝色的碰碰车彼此相撞。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 762, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/92.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/762/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1172/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1172/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/762/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/762/loser.png", "save_id": 1172, "prompt_en": "Make the red and blue bumper cars collide with each other.", "prompt_cn": "让红色和蓝色的碰碰车彼此相撞。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 763, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Obj_interaction/95.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/763/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1175/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Object_Interaction/1175/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/763/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/763/loser.png", "save_id": 1175, "prompt_en": "Have the man pick up the camera and start taking photos.", "prompt_cn": "让这位男士拿起相机开始拍照。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 764, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/764/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/138/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/138/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/764/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/764/loser.png", "save_id": 138, "prompt_en": "Remove the robot figure on the right.", "prompt_cn": "移除右边的机器人公仔", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 765, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/765/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/140/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/140/13.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/765/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/765/loser.png", "save_id": 140, "prompt_en": "Remove the heart from the image.", "prompt_cn": "移除图像中的爱心。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 766, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/87.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/766/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/152/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/152/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/766/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/766/loser.png", "save_id": 152, "prompt_en": "Remove the single noticeable white egg from the middle of the carton.", "prompt_cn": "移除蛋盒中唯一那颗显眼的白色鸡蛋", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 767, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/245.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/767/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/155/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/155/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/767/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/767/loser.png", "save_id": 155, "prompt_en": "Remove the small boat from the image.", "prompt_cn": "移除图中的小船。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 768, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/257.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/768/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/157/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/157/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/768/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/768/loser.png", "save_id": 157, "prompt_en": "Remove the camera from the image.", "prompt_cn": "移除图中的相机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 769, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/261.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/769/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/158/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/158/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/769/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/769/loser.png", "save_id": 158, "prompt_en": "Remove all the popcorn pieces that are scattered on the table.", "prompt_cn": "移除桌面上所有散落的爆米花粒。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 770, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part1/image/300.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/770/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/171/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/171/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/770/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/770/loser.png", "save_id": 171, "prompt_en": "Remove the red trash can.", "prompt_cn": "移除红色的垃圾桶。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 771, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/388.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/771/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/172/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/172/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/771/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/771/loser.png", "save_id": 172, "prompt_en": "Remove the blanket from the sofa.", "prompt_cn": "移除沙发上的毯子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 772, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/393.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/772/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/173/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/173/0.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/772/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/772/loser.png", "save_id": 173, "prompt_en": "Remove the microwave from the image.", "prompt_cn": "移除图中的微波炉。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 773, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_Image_Aware/5/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/773/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Image_Aware/2888/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Image_Aware/2888/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/773/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/773/loser.png", "save_id": 2888, "prompt_en": "Match the material of the ring on the hand in the original image to the material of the bracelet in the reference image.", "prompt_cn": "将原始图像中手上的戒指的材质和参考图像中镯子的材质保持一致。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/773/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 774, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_Image_Aware/12/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/774/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Image_Aware/2895/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Image_Aware/2895/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/774/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/774/loser.png", "save_id": 2895, "prompt_en": "Match the state of the water in the cup in the original image to the state of the water in the cup in the reference image.", "prompt_cn": "将原始图像中杯子里的水的状态与参考图像中杯子里的水的状态保持一致。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/774/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 775, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_Image_Aware/36/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/775/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Image_Aware/2917/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Image_Aware/2917/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/775/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/775/loser.png", "save_id": 2917, "prompt_en": "Remove the object in the original image that serves the same function as the object in the reference image.", "prompt_cn": "将原始图像中与参考图像功能相同的物体移除。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/775/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 776, "sampling_model": "Qwen-Image-Edit-R1", 
"source_image_ori": "Part5/Multi_pos/23/source_1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/776/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2937/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2937/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/776/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/776/loser.png", "save_id": 2937, "prompt_en": "Tie the object from the reference image around the woman’s neck in the first image.", "prompt_cn": "在图一女人脖子上系上参考图像中的物体", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/776/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 777, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/1/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/777/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2985/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2985/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/777/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/777/tie_2.png", "save_id": 2985, "prompt_en": "Change the man’s outfit to the T-shirt from reference image 1 and the jeans from reference image 2", "prompt_cn": "让男子换上参考图像1的T恤和参考图像2的牛仔裤。", "label": "tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/777/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/777/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 778, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/3/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/778/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2987/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2987/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/778/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/778/loser.png", "save_id": 2987, "prompt_en": "Change the man’s outfit to the leather jacket from reference image 1 and the jeans from reference image 2.", "prompt_cn": "给男子换上参考图像1的皮夹克和参考图像2的牛仔裤。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/778/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/778/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 779, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/3/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/779/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2987/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2987/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/779/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/779/loser.png", "save_id": 2987, "prompt_en": "Change the man’s outfit to the leather jacket from reference image 1 and the jeans from reference image 
2.", "prompt_cn": "给男子换上参考图像1的皮夹克和参考图像2的牛仔裤。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/779/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/779/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 780, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/4/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/780/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2988/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2988/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/780/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/780/loser.png", "save_id": 2988, "prompt_en": "Replace the model’s outfit with the shirt from reference image 1 and the skirt from reference image 2.", "prompt_cn": "将模特的服装换成参考图像1的衬衫和参考图像2的裙子。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/780/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/780/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 781, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/5/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/781/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2989/12.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2989/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/781/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/781/loser.png", "save_id": 2989, "prompt_en": "Dress the businessman by replacing his outfit with the suit from reference image 1 and the tie from reference image 2.", "prompt_cn": "让这位商务男士换上参考图像1的西装和参考图像2的领带", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/781/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/781/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 782, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/6/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/782/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2990/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2990/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/782/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/782/loser.png", "save_id": 2990, "prompt_en": "Change the seated girl’s outfit to the hoodie from reference image 1 and the pants from reference image 2.", "prompt_cn": "让坐着的女孩换上参考图像1的连帽衫和参考图像2的裤子。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/782/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/782/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": 
"IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 783, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/8/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/783/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2992/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2992/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/783/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/783/tie_2.png", "save_id": 2992, "prompt_en": "Change the girl’s clothing to match the tank top in reference image 1 and the wide-leg pants in reference image 2.", "prompt_cn": "把女生的衣服换成参考图像1的背心和参考图像2的阔腿裤。", "label": "tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/783/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/783/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 784, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/9/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/784/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2993/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2993/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/784/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/784/loser.png", "save_id": 2993, "prompt_en": "Dress the girl in the top from reference image 1 and the yoga pants from reference image 2.", "prompt_cn": "给这个女孩穿上参考图像1的上衣和参考图像2的瑜伽裤。", "label": "preference", "dimension": "VC", "reference_image_path": [ 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/784/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/784/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 785, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/10/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/785/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2994/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2994/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/785/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/785/loser.png", "save_id": 2994, "prompt_en": "Replace the dancer’s outfit with the ballet tutu from reference image 1.", "prompt_cn": "让舞者换上参考图像1的芭蕾舞裙。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/785/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 786, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/10/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/786/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2994/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2994/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/786/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/786/loser.png", "save_id": 2994, 
"prompt_en": "Replace the dancer’s outfit with the ballet tutu from reference image 1.", "prompt_cn": "让舞者换上参考图像1的芭蕾舞裙。", "label": "preference", "dimension": "VQ", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/786/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 787, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/13/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/787/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2997/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2997/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/787/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/787/tie_2.png", "save_id": 2997, "prompt_en": "Replace her outfit with the office shirt from reference image 1 and the pencil skirt from reference image 2.", "prompt_cn": "将她的衣服换成参考图像1的办公衬衫和参考图像2的铅笔裙", "label": "tie", "dimension": "VQ", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/787/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/787/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 788, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/13/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/788/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2997/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2997/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/788/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/788/tie_2.png", "save_id": 2997, "prompt_en": "Replace her outfit with the office shirt from reference image 1 and the pencil skirt from reference image 2.", "prompt_cn": "将她的衣服换成参考图像1的办公衬衫和参考图像2的铅笔裙", "label": "tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/788/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/788/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 789, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/13/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/789/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2997/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2997/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/789/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/789/loser.png", "save_id": 2997, "prompt_en": "Replace her outfit with the office shirt from reference image 1 and the pencil skirt from reference image 2.", "prompt_cn": "将她的衣服换成参考图像1的办公衬衫和参考图像2的铅笔裙", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/789/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/789/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 790, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/35/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/790/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2949/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2949/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/790/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/790/loser.png", "save_id": 2949, "prompt_en": "Have the woman in the first image hold the yoga mat from reference image 1 and place her in the rice field background from reference image 2.", "prompt_cn": "让第一张图像中的女性拿着参考图像1的瑜伽垫,站在参考图像2的稻田背景中。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/790/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/790/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 791, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/39/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/791/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2953/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2953/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/791/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/791/loser.png", "save_id": 2953, "prompt_en": "Place the man from image 1 with the fishing rod from image 2 on the dock scene from image 3.", "prompt_cn": "让第一张图像中的男性拿着第二张图像中的钓鱼竿,坐在第三张图像的码头上。", "label": "preference", "dimension": "IF", "reference_image_path": [ 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/791/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/791/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 792, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/41/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/792/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2955/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2955/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/792/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/792/loser.png", "save_id": 2955, "prompt_en": "Place the woman from image 1 with the champagne from image 2 onto the yacht deck scene from image 3.", "prompt_cn": "让第一张图像中的女性拿着第二张图像中的香槟,站在第三张图像的游艇甲板上。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/792/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/792/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 793, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/74/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/793/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2968/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2968/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/793/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/793/loser.png", "save_id": 2968, "prompt_en": "Place the musician from image 1 on the music hall stage in image 3, holding the golden trophy from image 2.", "prompt_cn": "让第一张图像中的女演奏家手持第二张图像中的金色奖杯,站在第三张图像的音乐厅舞台上。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/793/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/793/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 794, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/76/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/794/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2970/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2970/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/794/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/794/loser.png", "save_id": 2970, "prompt_en": "Place the dog and red frisbee from image 1 on the sunny beach from image 2.", "prompt_cn": "让第一张图像中的犬和红色飞盘,放在第二张图像中的阳光沙滩上。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/794/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 795, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/82/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/795/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2975/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2975/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/795/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/795/loser.png", "save_id": 2975, "prompt_en": "Have the man from image 1 hold the wooden guitar from image 2.", "prompt_cn": "让第一张图像中的男性拿着第二张图像中的木吉他。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/795/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 796, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/87/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/796/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2978/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2978/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/796/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/796/loser.png", "save_id": 2978, "prompt_en": "Place the person from image 1 holding the bouquet of flowers from image 2 at the house doorstep background of image 3", "prompt_cn": "让第一张图像中的人在第三张图像房子门口背景前,手里拿着第二张图像中的花束", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/796/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/796/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, 
"hints": "" }, { "idx": 797, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/92/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/797/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2981/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2981/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/797/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/797/loser.png", "save_id": 2981, "prompt_en": "Place Santa Claus from Image 1 holding the surfboard from Image 2 on the tropical beach from Image 3.", "prompt_cn": "让第一张图像中的圣诞老人拿着第二张图像中的冲浪板,站在第三张图像的热带海滩上。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/797/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/797/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 798, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/93/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/798/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2982/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2982/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/798/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/798/tie_2.png", "save_id": 2982, "prompt_en": "Place the bride from image 1 in the wedding church, playing the electric guitar from image 2.", "prompt_cn": "让第一张图像中的新娘在婚礼教堂里弹奏第二张图像中的电吉他。", "label": "tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/798/ref_1.png" ], 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 799, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/98/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/799/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2983/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2983/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/799/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/799/loser.png", "save_id": 2983, "prompt_en": "Have the young man from Image 1 reading the book from Image 2 while sitting in the living room from Image 3.", "prompt_cn": "让第一张图像中的年轻人在第三张图像的客厅里阅读第二张图像中的书。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/799/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/799/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 800, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/98/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/800/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2983/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2983/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/800/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/800/loser.png", "save_id": 2983, "prompt_en": "Have the young man from Image 1 
reading the book from Image 2 while sitting in the living room from Image 3.", "prompt_cn": "让第一张图像中的年轻人在第三张图像的客厅里阅读第二张图像中的书。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/800/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/800/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 801, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Multi_pos/101/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/801/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2984/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Multi_Ref/2984/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/801/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/801/loser.png", "save_id": 2984, "prompt_en": "Have the runner from image 1 carrying the water bottle from image 2.", "prompt_cn": "让第一张图像中的跑步者手里拿着第二张图像中的水壶。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/801/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 802, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/14/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/802/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2998/15.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/2998/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/802/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/802/loser.png", "save_id": 2998, "prompt_en": "Replace the girl’s outfit with the skinny jeans from reference image 1 and the cropped cardigan from reference image 2.", "prompt_cn": "给女生换上参考图像1的紧身牛仔裤和参考图像2的短款开衫。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/802/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/802/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 803, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/16/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/803/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3000/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3000/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/803/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/803/loser.png", "save_id": 3000, "prompt_en": "Change the girl’s clothing to the cherry midriff top from reference image 1 paired with the pleated skirt from reference image 2.", "prompt_cn": "让女孩换上参考图像1的樱桃露脐上衣和参考图像2的百褶裙", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/803/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/803/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 804, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/16/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/804/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3000/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3000/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/804/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/804/loser.png", "save_id": 3000, "prompt_en": "Change the girl’s clothing to the cherry midriff top from reference image 1 paired with the pleated skirt from reference image 2.", "prompt_cn": "让女孩换上参考图像1的樱桃露脐上衣和参考图像2的百褶裙", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/804/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/804/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 805, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/22/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/805/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3006/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3006/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/805/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/805/tie_2.png", "save_id": 3006, "prompt_en": "Change the traveler's clothes to the windbreaker and hiking trousers from reference image 1 and reference image 2.", "prompt_cn": "将旅行者的衣服换成参考图像1的防风衣和参考图像2的徒步裤。", "label": 
"tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/805/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/805/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 806, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/23/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/806/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3007/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3007/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/806/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/806/loser.png", "save_id": 3007, "prompt_en": "Change the model’s clothing to the fluorescent sports bra from reference image 1 paired with the shorts from reference image 2.", "prompt_cn": "让模特换上参考图像1的荧光色运动内衣和参考图像2的短裤。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/806/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/806/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 807, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/24/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/807/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3008/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3008/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/807/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/807/loser.png", "save_id": 3008, "prompt_en": "Replace his outfit with the band T-shirt from reference image 1 and the pants from reference image 2.", "prompt_cn": "将他的装束换成参考图像1的乐队T恤和参考图像2的裤子。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/807/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/807/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 808, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/24/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/808/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3008/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3008/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/808/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/808/loser.png", "save_id": 3008, "prompt_en": "Replace his outfit with the band T-shirt from reference image 1 and the pants from reference image 2.", "prompt_cn": "将他的装束换成参考图像1的乐队T恤和参考图像2的裤子。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/808/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/808/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" 
}, "hints": "" }, { "idx": 809, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/25/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/809/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3009/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3009/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/809/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/809/loser.png", "save_id": 3009, "prompt_en": "Replace the girl’s outfit with the lace wedding dress from reference image 1.", "prompt_cn": "让女孩换上参考图像1的蕾丝婚纱", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/809/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 810, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/29/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/810/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3013/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3013/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/810/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/810/loser.png", "save_id": 3013, "prompt_en": "Replace the girl’s outfit with the uniform shirt from reference image 1 and the apron from reference image 2.", "prompt_cn": "让女生换上参考图像1的制服衬衫并系上参考图像2的围裙", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/810/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/810/ref_2.png" ], 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 811, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/30/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/811/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3014/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3014/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/811/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/811/loser.png", "save_id": 3014, "prompt_en": "Replace the skateboarder’s outfit with the T-shirt from reference image 1 and the pants from reference image 2.", "prompt_cn": "将滑板手的装束换成参考图像1的T 恤和参考图像2的裤子", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/811/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/811/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 812, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/31/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/812/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3015/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3015/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/812/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/812/loser.png", "save_id": 3015, "prompt_en": "Replace 
the worker’s outfit with the clothes from reference image 1 and put on the hat from reference image 2.", "prompt_cn": "给工人换上参考图像1的衣服并戴上参考图像2的帽子。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/812/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/812/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 813, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/34/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/813/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3018/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3018/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/813/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/813/tie_2.png", "save_id": 3018, "prompt_en": "Replace her outfit with the shirt from reference image 1 and the leather skirt from reference image 2.", "prompt_cn": "将她的装束换成参考图像1的衬衫和参考图像2的皮裙", "label": "tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/813/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/813/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 814, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/38/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/814/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3022/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3022/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/814/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/814/loser.png", "save_id": 3022, "prompt_en": "Change the man’s clothing to the trench coat from reference image 1 paired with the trousers from reference image 2.", "prompt_cn": "给男子换上参考图像1的风衣和参考图像2的长裤", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/814/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/814/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 815, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/40/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/815/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3023/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3023/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/815/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/815/loser.png", "save_id": 3023, "prompt_en": "Change the man’s clothing to the outfit shown in reference image 1.", "prompt_cn": "让男子换上参考图像1的衣服。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/815/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 816, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/41/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/816/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3024/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3024/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/816/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/816/loser.png", "save_id": 3024, "prompt_en": "Dress her by replacing her outfit with the clothes from reference image 1.", "prompt_cn": "将她的衣服换成参考图像1的衣服。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/816/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 817, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/43/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/817/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3026/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3026/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/817/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/817/loser.png", "save_id": 3026, "prompt_en": "Change the man’s clothing to the shirt from reference image 1 paired with the trousers from reference image 2.", "prompt_cn": "让男子换上参考图像1的衬衫和参考图像2的裤子。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/817/ref_1.png", 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/817/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 818, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/44/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/818/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3027/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3027/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/818/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/818/loser.png", "save_id": 3027, "prompt_en": "Replace the boy’s outfit with the top from reference image 1 and the pants from reference image 2.", "prompt_cn": "让男生换上参考图像1的上衣和参考图像2的裤子。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/818/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/818/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 819, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/44/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/819/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3027/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3027/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/819/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/819/loser.png", "save_id": 3027, "prompt_en": "Replace the boy’s outfit with the top from reference image 1 and the pants from reference image 2.", "prompt_cn": "让男生换上参考图像1的上衣和参考图像2的裤子。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/819/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/819/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 820, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/46/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/820/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3029/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3029/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/820/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/820/loser.png", "save_id": 3029, "prompt_en": "Change the girl's clothes to the dress from reference image 1", "prompt_cn": "将女孩的衣服换成参考图像1的裙子。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/820/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 821, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/47/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/821/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3030/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3030/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/821/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/821/loser.png", "save_id": 3030, "prompt_en": "Change the boy’s clothing to the outfit shown in reference image 1.", "prompt_cn": "将男生的衣服换成参考图像1的衣服。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/821/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 822, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/48/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/822/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3031/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3031/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/822/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/822/loser.png", "save_id": 3031, "prompt_en": "Change the girl’s clothing to the shirt from reference image 1 paired with the trousers from reference image 2.", "prompt_cn": "将女生的装束换成参考图像1的衬衫和参考图像2的裤子。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/822/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/822/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 823, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/49/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/823/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3032/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3032/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/823/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/823/loser.png", "save_id": 3032, "prompt_en": "Replace the girl’s outfit with the clothes from reference image 1.", "prompt_cn": "将女生的衣服换成参考图像1的衣服。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/823/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 824, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/50/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/824/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3033/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3033/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/824/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/824/loser.png", "save_id": 3033, "prompt_en": "Change the man’s clothing to the outfit shown in reference image 1.", "prompt_cn": "让男子换上参考图像1的衣服。", "label": "preference", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/824/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", 
"VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 825, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/56/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/825/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3039/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3039/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/825/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/825/loser.png", "save_id": 3039, "prompt_en": "Replace the girl’s outfit with the jacket from reference image 1, the skirt from reference image 2, and the shoes from reference image 3.", "prompt_cn": "给女孩穿上参考图像1的外套、参考图像2的裙子和参考图像3的鞋子。", "label": "preference", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/825/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/825/ref_2.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/825/ref_3.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 826, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part5/Virtual_Try_On/58/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/826/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3041/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part5/Virtual_Try_On/3041/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/826/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/826/tie_2.png", "save_id": 3041, "prompt_en": "Change the girl’s outfit to 
the pants from reference image 1, the cropped jacket from reference image 2, and the running shoes from reference image 3.", "prompt_cn": "给女孩换上参考图像1的裤子、参考图像2的短外套和参考图像3的跑鞋。", "label": "tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/826/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/826/ref_2.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/826/ref_3.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 827, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Remove_attribute/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/827/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/186/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/186/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/827/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/827/loser.png", "save_id": 186, "prompt_en": "Remove all leather furniture.", "prompt_cn": "移除所有皮革家具。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 828, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Remove_attribute/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/828/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/192/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/192/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/828/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/828/loser.png", "save_id": 192, "prompt_en": "Remove all electronic devices.", "prompt_cn": "移除所有电子设备。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 829, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Remove_spatial/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/829/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/200/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Remove/200/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/829/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/829/loser.png", "save_id": 200, "prompt_en": "Remove the vehicle that is nearest to the traffic sign.", "prompt_cn": "移除离交通指示牌最近的车辆。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 830, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/830/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/225/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/225/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/830/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/830/loser.png", "save_id": 225, "prompt_en": "Replace the small beige pillow on the right side of the table with a vintage metal lantern.", "prompt_cn": "将桌子右侧的米白色小抱枕换成一个复古金属小风灯", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 831, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/831/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/225/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/225/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/831/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/831/loser.png", "save_id": 225, "prompt_en": "Replace the small beige pillow on the right side of the table with a vintage metal lantern.", "prompt_cn": "将桌子右侧的米白色小抱枕换成一个复古金属小风灯", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 832, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/832/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/225/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/225/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/832/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/832/tie_2.png", "save_id": 225, "prompt_en": "Replace the small beige pillow on the right side of the table with a vintage metal lantern.", "prompt_cn": "将桌子右侧的米白色小抱枕换成一个复古金属小风灯", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 833, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/99.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/833/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/233/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/233/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/833/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/833/tie_2.png", "save_id": 233, "prompt_en": "Replace the wooden bench with a bicycle.", "prompt_cn": "将木长椅替换为一辆自行车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 834, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/111.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/834/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/235/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/235/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/834/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/834/tie_2.png", "save_id": 235, "prompt_en": "Change the dog sitting next to the woman into a cat on the grass.", "prompt_cn": "把紧挨女子身边的那只狗改成一只猫", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 835, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/126.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/835/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/244/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/244/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/835/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/835/tie_2.png", "save_id": 244, "prompt_en": "Replace the decorative pillow with a teddy bear.", "prompt_cn": "把圆柱形抱枕换成一只泰迪熊。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 836, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/139.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/836/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/251/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/251/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/836/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/836/tie_2.png", "save_id": 251, "prompt_en": "Change the light purple bedspread to a Pikachu-themed design.", "prompt_cn": "将浅紫色床罩改为皮卡丘主题设计。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 837, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/160.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/837/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/256/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/256/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/837/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/837/loser.png", "save_id": 256, "prompt_en": "Replace the gold pillow on the sofa with a white cushion.", "prompt_cn": "把沙发上的金色枕头替换成白色靠垫。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 838, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/260.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/838/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/265/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/265/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/838/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/838/loser.png", "save_id": 265, "prompt_en": "Replace the emoji face with a realistic small football.", "prompt_cn": "将中央的表情圆形替换成一个真实质感的小足球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 839, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/377.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/839/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/271/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/271/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/839/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/839/tie_2.png", "save_id": 271, "prompt_en": "Replace the spoon with a Dove chocolate.", "prompt_cn": "将勺子换成德芙巧克力。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 840, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/420.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/840/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/273/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/273/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/840/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/840/loser.png", "save_id": 273, "prompt_en": "Replace the man in the red jacket with a woman wearing a yellow coat.", "prompt_cn": "用一位穿黄色外套的女性替换红衣男子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 841, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/420.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/841/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/273/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/273/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/841/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/841/loser.png", "save_id": 273, "prompt_en": "Replace the man in the red jacket with a woman wearing a yellow coat.", "prompt_cn": "用一位穿黄色外套的女性替换红衣男子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 842, "sampling_model": 
"Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/437.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/842/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/281/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/281/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/842/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/842/tie_2.png", "save_id": 281, "prompt_en": "Replace the paint bucket with a desktop computer tower.", "prompt_cn": "将油漆桶换成一台台式电脑主机。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 843, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/443.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/843/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/285/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/285/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/843/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/843/tie_2.png", "save_id": 285, "prompt_en": "Replace the central black coffee cup with a ceramic mug that has a yellow base color and black polka dots.", "prompt_cn": "将中间的黑色咖啡杯替换为一只带有黄底黑点图案的陶瓷马克杯。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 844, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/449.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/844/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/288/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/288/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/844/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/844/tie_2.png", "save_id": 288, "prompt_en": "Change the ottoman into a shoe cabinet.", "prompt_cn": "将脚凳换成一个鞋柜。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 845, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/467.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/845/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/295/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Replace/295/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/845/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/845/tie_2.png", "save_id": 295, "prompt_en": "Replace the person in the foreground with a Mickey Mouse neon sign.", "prompt_cn": "把前景的人换为一个米老鼠的霓虹灯牌。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 846, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part1/image/75.webp", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/846/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Style_Transfer/618/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part1/Style_Transfer/618/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/846/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/846/tie_2.png", "save_id": 618, "prompt_en": "Convert the image into an 8-bit pixel art style.", "prompt_cn": "将图像转换为 8 位像素画风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 847, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/847/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1044/15.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1044/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/847/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/847/loser.png", "save_id": 1044, "prompt_en": "Swap the positions of the red apple and the green pear.", "prompt_cn": "交换红色苹果和绿色梨的位置。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 848, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/848/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1065/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1065/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/848/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/848/loser.png", "save_id": 1065, "prompt_en": "Swap the materials of the chair and the table.", "prompt_cn": "将椅子和桌子的材质互相交换。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 849, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/849/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1065/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1065/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/849/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/849/loser.png", "save_id": 1065, "prompt_en": "Swap the materials of the chair and the table.", "prompt_cn": "将椅子和桌子的材质互相交换。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 850, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/37.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/850/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1068/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1068/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/850/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/850/loser.png", "save_id": 1068, "prompt_en": "Swap the blue pillow and the red pillow.", "prompt_cn": "交换蓝色枕头和红色枕头的位置。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 851, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/38.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/851/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1069/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1069/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/851/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/851/loser.png", "save_id": 1069, "prompt_en": "Swap the colors of the cube and the sphere.", "prompt_cn": "交换立方体和球体的颜色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 852, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/41.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/852/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1071/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1071/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/852/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/852/loser.png", "save_id": 1071, "prompt_en": "Swap the upholstery material of the couch and the armchair.", "prompt_cn": "在沙发和扶手椅之间交换软包材质。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 853, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/48.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/853/source.png", 
"winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1077/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1077/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/853/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/853/loser.png", "save_id": 1077, "prompt_en": "Swap the contents inside the picture frame.", "prompt_cn": "交换画框中的内容。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 854, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/48.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/854/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1077/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1077/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/854/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/854/loser.png", "save_id": 1077, "prompt_en": "Swap the contents inside the picture frame.", "prompt_cn": "交换画框中的内容。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 855, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/71.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/855/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1099/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1099/8.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/855/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/855/loser.png", "save_id": 1099, "prompt_en": "Swap the eye states of the two people.", "prompt_cn": "交换两个人眼睛状态。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 856, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part2/Swap/72.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/856/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1100/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part2/Swap/1100/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/856/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/856/loser.png", "save_id": 1100, "prompt_en": "Swap the colors of the two umbrellas.", "prompt_cn": "交换两把伞的颜色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 857, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/857/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1599/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1599/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/857/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/857/loser.png", "save_id": 1599, "prompt_en": "Change the title '江湖往事' to 
'剑影萍踪'", "prompt_cn": "将标题文字“江湖往事”更改为“剑影萍踪”。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 858, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/858/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1611/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1611/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/858/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/858/tie_2.png", "save_id": 1611, "prompt_en": "Change the title '纵横四海' to '英雄本色'", "prompt_cn": "将标题文字从“纵横四海”修改为“英雄本色”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 859, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/101.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/859/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1612/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1612/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/859/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/859/loser.png", "save_id": 1612, "prompt_en": "Add large white handwritten text reading “致我们终将逝去的青春” at the center of the sky.", "prompt_cn": "在天空中央加入巨大的白色手写体文字“致我们终将逝去的青春”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 860, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/101.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/860/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1612/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1612/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/860/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/860/tie_2.png", "save_id": 1612, "prompt_en": "Add large white handwritten text reading “致我们终将逝去的青春” at the center of the sky.", "prompt_cn": "在天空中央加入巨大的白色手写体文字“致我们终将逝去的青春”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 861, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/102.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/861/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1616/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1616/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/861/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/861/tie_2.png", "save_id": 1616, "prompt_en": "Add Chinese cursive calligraphy text reading “孤独是最长的旅程” at the top center of the poster, emitting a soft beige glow.", "prompt_cn": "在海报正上方中央加入发出柔和米色光芒的中国书法行草文字“孤独是最长的旅程”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 862,
"sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/862/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1617/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1617/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/862/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/862/loser.png", "save_id": 1617, "prompt_en": "Erase the Chinese text “孤独是最长的旅程” at the center of the image.", "prompt_cn": "擦除画面中央的中文文字“孤独是最长的旅程”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 863, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/12.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/863/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1620/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1620/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/863/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/863/tie_2.png", "save_id": 1620, "prompt_en": "Add a smaller line of gold text reading “湖山臻境 私享人生” below the main title at the top of the image.", "prompt_cn": "在画面上方主标题下方添加较小金色文字\"湖山臻境 私享人生\"", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 864, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/12.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/864/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1622/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1622/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/864/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/864/loser.png", "save_id": 1622, "prompt_en": "Change '盛大开盘' to '即将售完'.", "prompt_cn": "将文字“盛大开盘”修改为“即将售完”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 865, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/865/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1630/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1630/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/865/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/865/tie_2.png", "save_id": 1630, "prompt_en": "Add yellow handwritten-style text reading ‘数量有限 先到先得’ in the center of the road at the bottom of the image.", "prompt_cn": "在底部马路中央印上黄色手写体\"数量有限 先到先得\"", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 866, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/866/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1632/3.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1632/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/866/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/866/loser.png", "save_id": 1632, "prompt_en": "Erase the black Chinese text “立即免费试听” from the central yellow button.", "prompt_cn": "擦除中间黄色按钮上的黑色中文“立即免费试听”字样", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 867, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/24.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/867/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1644/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1644/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/867/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/867/loser.png", "save_id": 1644, "prompt_en": "Change '改革开放四十年' in the title to '新时代风貌'", "prompt_cn": "将标题中的“改革开放四十年”更改为“新时代风貌”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 868, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/868/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1647/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1647/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/868/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/868/tie_2.png", "save_id": 1647, "prompt_en": "Change '古籍新生' to '纸上春秋'", "prompt_cn": "将文字“古籍新生”更改为“纸上春秋”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 869, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/869/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1649/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1649/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/869/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/869/loser.png", "save_id": 1649, "prompt_en": "Remove the text ‘丝路辉煌’ from the image.", "prompt_cn": "移除“丝路辉煌”的文字。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 870, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/870/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1650/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1650/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/870/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/870/tie_2.png", "save_id": 1650, "prompt_en": "Change the title '丝路辉煌' to '西域传奇'", "prompt_cn": "将标题“丝路辉煌”更改为“西域传奇”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 871, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/871/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1653/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1653/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/871/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/871/loser.png", "save_id": 1653, "prompt_en": "Remove the black Chinese title text “现代主义边界” at the top of the poster.", "prompt_cn": "移除海报顶部标题中“现代主义边界”的黑色汉字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 872, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/872/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1658/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1658/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/872/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/872/loser.png", "save_id": 1658, "prompt_en": "Remove the white Chinese text “厨余垃圾” from the green bin front.", "prompt_cn": "移除绿色垃圾桶正面底部白色“厨余垃圾”中文字样。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 873, "sampling_model": 
"Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/34.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/873/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1659/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1659/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/873/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/873/tie_2.png", "save_id": 1659, "prompt_en": "Change the poem line to '厉行节约,反对浪费'", "prompt_cn": "将诗句修改为“厉行节约,反对浪费”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 874, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/36.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/874/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1662/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1662/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/874/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/874/loser.png", "save_id": 1662, "prompt_en": "Change the slogan to '生命在于运动'", "prompt_cn": "将标语更改为“生命在于运动”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 875, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/875/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1663/4.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1663/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/875/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/875/loser.png", "save_id": 1663, "prompt_en": "Remove the large black Chinese slogan “不听、不信、不转账” from the center of the poster.", "prompt_cn": "移除海报中间大号黑色中文“不听、不信、不转账”字样", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 876, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/876/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1666/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1666/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/876/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/876/tie_2.png", "save_id": 1666, "prompt_en": "Change the slogan to '无偿献血,拯救生命'", "prompt_cn": "将标语更改为“无偿献血,拯救生命”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 877, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/49.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/877/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1679/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1679/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/877/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/877/tie_2.png", "save_id": 1679, "prompt_en": "Change the text to '童心未泯,一切皆甜'.", "prompt_cn": "将文字更改为“童心未泯,一切皆甜”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 878, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/49.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/878/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1679/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1679/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/878/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/878/loser.png", "save_id": 1679, "prompt_en": "Change the text to '童心未泯,一切皆甜'.", "prompt_cn": "将文字更改为“童心未泯,一切皆甜”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 879, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/57.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/879/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1687/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1687/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/879/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/879/loser.png", "save_id": 1687, "prompt_en": "Write the words '麻辣鲜香' in gold on the front exterior of the hotpot copper pot.", "prompt_cn": "在火锅铜盆外壁正面写上金色\"麻辣鲜香\"字体。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 880, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/880/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1691/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1691/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/880/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/880/loser.png", "save_id": 1691, "prompt_en": "Change the departure date '8月1日' to '9月15日'", "prompt_cn": "将出发日期从“8月1日”更改为“9月15日”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 881, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/71.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/881/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1704/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1704/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/881/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/881/tie_2.png", "save_id": 1704, "prompt_en": "Change the text '中华美食嘉年华' to '国际啤酒炸鸡节'.", "prompt_cn": "将文字“中华美食嘉年华”修改为“国际啤酒炸鸡节”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 882, "sampling_model": 
"Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/75.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/882/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1708/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1708/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/882/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/882/loser.png", "save_id": 1708, "prompt_en": "Change the slogan to '轻盈生活,从吃素开始'", "prompt_cn": "将标语更改为“轻盈生活,从吃素开始”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 883, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/78.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/883/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1711/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1711/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/883/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/883/tie_2.png", "save_id": 1711, "prompt_en": "Add bold red text reading “预约享受九折优惠” above the text “七夕节”.", "prompt_cn": "在文字“七夕节”上方添加红色粗体文字“预约享受九折优惠”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 884, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/80.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/884/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1714/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1714/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/884/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/884/loser.png", "save_id": 1714, "prompt_en": "Change '重庆味' to '成都味'", "prompt_cn": "将文字“重庆味”修改为“成都味”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 885, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/91.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/885/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1727/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1727/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/885/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/885/loser.png", "save_id": 1727, "prompt_en": "Change the text '迷笛音乐节' to '草莓音乐节'.", "prompt_cn": "将文字“迷笛音乐节”修改为“草莓音乐节”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 886, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_cn/92.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/886/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1728/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_CN/1728/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/886/winner.png", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/886/loser.png", "save_id": 1728, "prompt_en": "Change the orchestra name to '中国爱乐乐团'.", "prompt_cn": "将乐团名称更改为“中国爱乐乐团”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 887, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/887/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1745/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1745/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/887/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/887/loser.png", "save_id": 1745, "prompt_en": "Remove the large white uppercase slogan text “A FLUFFY ADVENTURE - COMING SOON” at the bottom center.", "prompt_cn": "移除底部中央白色大写英文标语“A FLUFFY ADVENTURE - COMING SOON”", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 888, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/888/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/888/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/888/tie_2.png", "save_id": 1746, "prompt_en": "Change the title 'OCEAN' to 'DESERT'.", "prompt_cn": 
"将标题“OCEAN”更改为“DESERT”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 889, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/889/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/889/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/889/loser.png", "save_id": 1746, "prompt_en": "Change the title 'OCEAN' to 'DESERT'.", "prompt_cn": "将标题“OCEAN”更改为“DESERT”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 890, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/890/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/890/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/890/loser.png", "save_id": 1746, "prompt_en": "Change the title 'OCEAN' to 'DESERT'.", "prompt_cn": "将标题“OCEAN”更改为“DESERT”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 891, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/891/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1746/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/891/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/891/tie_2.png", "save_id": 1746, "prompt_en": "Change the title 'OCEAN' to 'DESERT'.", "prompt_cn": "将标题“OCEAN”更改为“DESERT”。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 892, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/892/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1749/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1749/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/892/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/892/loser.png", "save_id": 1749, "prompt_en": "Remove the large yellow English text and numbers “5 STARS” at the lower left corner.", "prompt_cn": "移除左下角大号的黄色英文数字与单词“5 STARS”", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 893, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/15.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/893/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1756/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1756/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/893/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/893/tie_2.png", "save_id": 1756, "prompt_en": "Add the light brown English phrase 'Music for the soul' in the lower-left corner.", "prompt_cn": "在左下角添加浅棕色英文短句“Music for the soul”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 894, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/894/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1756/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1756/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/894/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/894/loser.png", "save_id": 1756, "prompt_en": "Add the light brown English phrase 'Music for the soul' in the lower-left corner.", "prompt_cn": "在左下角添加浅棕色英文短句“Music for the soul”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 895, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/895/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1759/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1759/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/895/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/895/loser.png", "save_id": 1759, "prompt_en": "Change 'East Coast' to 'West Coast'.", "prompt_cn": "将文字“East Coast”修改为“West Coast”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 896, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/896/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1773/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1773/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/896/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/896/loser.png", "save_id": 1773, "prompt_en": "Write bold black English words “Luxury Estate” inside the white blank strip under the FOR SALE text.", "prompt_cn": "在红色“FOR SALE”牌子下方空白白条内写上加粗黑色英文“Luxury Estate”", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 897, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/29.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/897/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1776/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1776/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/897/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/897/tie_2.png", "save_id": 1776, "prompt_en": "Change 'Spring Look' to 'Autumn Look'.", "prompt_cn": "将文字“Spring Look”更改为“Autumn Look”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 898, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/29.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/898/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1776/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1776/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/898/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/898/loser.png", "save_id": 1776, "prompt_en": "Change 'Spring Look' to 'Autumn Look'.", "prompt_cn": "将文字“Spring Look”更改为“Autumn Look”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 899, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/899/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1785/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1785/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/899/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/899/loser.png", "save_id": 1785, "prompt_en": "Change the word 'Mars' to 'Moon'.", "prompt_cn": "将单词“Mars”修改为“Moon”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 900, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/900/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1785/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1785/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/900/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/900/loser.png", "save_id": 1785, "prompt_en": "Change the word 'Mars' to 'Moon'.", "prompt_cn": "将单词“Mars”修改为“Moon”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 901, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/901/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1785/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1785/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/901/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/901/tie_2.png", "save_id": 1785, "prompt_en": "Change the word 'Mars' to 'Moon'.", "prompt_cn": "将单词“Mars”修改为“Moon”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", 
"VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 902, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/902/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1787/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1787/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/902/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/902/loser.png", "save_id": 1787, "prompt_en": "Add a line of light-yellow text 'GUEST LECTURE SERIES' in the top-left corner of the blackboard.", "prompt_cn": "在黑板左上角添加一行浅黄色文字\"GUEST LECTURE SERIES\"。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 903, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/903/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1787/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1787/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/903/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/903/loser.png", "save_id": 1787, "prompt_en": "Add a line of light-yellow text 'GUEST LECTURE SERIES' in the top-left corner of the blackboard.", "prompt_cn": "在黑板左上角添加一行浅黄色文字\"GUEST LECTURE SERIES\"。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 904, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/904/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1787/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1787/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/904/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/904/tie_2.png", "save_id": 1787, "prompt_en": "Add a line of light-yellow text 'GUEST LECTURE SERIES' in the top-left corner of the blackboard.", "prompt_cn": "在黑板左上角添加一行浅黄色文字\"GUEST LECTURE SERIES\"。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 905, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/905/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1788/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1788/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/905/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/905/loser.png", "save_id": 1788, "prompt_en": "Erase the large black English title “SERIES A FUNDING”.", "prompt_cn": "擦除海报中央大号黑色标题“SERIES A FUNDING”英文文字。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 906, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": 
"Part3/Text_en/39.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/906/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1789/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1789/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/906/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/906/loser.png", "save_id": 1789, "prompt_en": "Erase the large red English title text “STOP THE HACKERS” at the top of the poster.", "prompt_cn": "擦除画面顶部巨大的红色英文标题“STOP THE HACKERS”文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 907, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/40.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/907/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1790/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1790/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/907/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/907/tie_2.png", "save_id": 1790, "prompt_en": "Add a line of thin white text 'Hall B, Downtown Convention Center' above the bottom text 'Starts at 9:00 AM'.", "prompt_cn": "在底部“Starts at 9:00 AM”上方再写一行细体白字“Hall B, Downtown Convention Center”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 908, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/40.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/908/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1790/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1790/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/908/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/908/tie_2.png", "save_id": 1790, "prompt_en": "Add a line of thin white text 'Hall B, Downtown Convention Center' above the bottom text 'Starts at 9:00 AM'.", "prompt_cn": "在底部“Starts at 9:00 AM”上方再写一行细体白字“Hall B, Downtown Convention Center”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 909, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/40.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/909/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1790/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1790/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/909/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/909/tie_2.png", "save_id": 1790, "prompt_en": "Add a line of thin white text 'Hall B, Downtown Convention Center' above the bottom text 'Starts at 9:00 AM'.", "prompt_cn": "在底部“Starts at 9:00 AM”上方再写一行细体白字“Hall B, Downtown Convention Center”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 910, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/42.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/910/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1792/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1792/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/910/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/910/loser.png", "save_id": 1792, "prompt_en": "Change the destination 'Hawaii' to 'Maldives'.", "prompt_cn": "将目的地文字“HAWAII”更改为“MALDIVES”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 911, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/50.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/911/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1803/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1803/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/911/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/911/loser.png", "save_id": 1803, "prompt_en": "Change 'Hop on Hop off' to 'Night Tour'.", "prompt_cn": "将文字“Hop on Hop off”修改为“Night Tour”。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 912, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/52.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/912/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1806/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1806/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/912/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/912/loser.png", "save_id": 1806, "prompt_en": "Change 'Pepperoni' to 'Vegetarian'.", "prompt_cn": "将“Pepperoni”这个单词改为“Vegetarian”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 913, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/54.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/913/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1808/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1808/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/913/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/913/loser.png", "save_id": 1808, "prompt_en": "Remove the large white title text “Caffe Latte” at the top of the poster.", "prompt_cn": "移除海报上方大号白色标题“Caffe Latte”文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 914, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/55.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/914/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1809/15.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1809/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/914/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/914/loser.png", "save_id": 1809, "prompt_en": "Add the English title 'Special Menu' on the surface of the wooden table in the bottom-right corner, using the same font and style as 'Happy' in the image.", "prompt_cn": "在右下角木桌表面添加英文标题\"Special Menu\",字体和风格和图中的“Happy”保持一致。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 915, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/57.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/915/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1813/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1813/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/915/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/915/loser.png", "save_id": 1813, "prompt_en": "Change the word 'Meat' to 'Vegan'.", "prompt_cn": "将单词“Meat”改为“Vegan”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 916, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/63.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/916/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1821/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1821/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/916/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/916/loser.png", "save_id": 1821, "prompt_en": "Erase the 
red bold phrase “WORLD TITLE FIGHT”.", "prompt_cn": "擦除底部左侧红色粗体英文短语“WORLD TITLE FIGHT”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 917, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/65.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/917/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1824/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1824/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/917/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/917/tie_2.png", "save_id": 1824, "prompt_en": "Add the text “RACE DAY” in the top-left corner of the image, using white Cooper Black font with a bold, thick dark outline.", "prompt_cn": "在图片左上角添加文字“RACE DAY”,使用白色 Cooper Black 字体,并加上粗厚的深色描边。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 918, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/66.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/918/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1825/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1825/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/918/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/918/loser.png", "save_id": 1825, "prompt_en": "Remove the large orange “NEW YORK CITY MARATHON” English title text at the top.", "prompt_cn": "移除画面顶部橙色大字“NEW YORK CITY 
MARATHON”英文标题文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 919, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/69.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/919/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1830/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1830/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/919/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/919/loser.png", "save_id": 1830, "prompt_en": "Change the slogan 'Get Strong' to 'Get Fast'.", "prompt_cn": "将标语“Get Strong”更改为“Get Fast”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 920, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/72.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/920/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1832/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1832/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/920/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/920/loser.png", "save_id": 1832, "prompt_en": "Remove the English phrase “LOOK BOTH WAYS” inside the black rectangle at bottom left.", "prompt_cn": "移除左下角黑色矩形中的英文短语“LOOK BOTH WAYS”", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" 
}, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 921, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/74.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/921/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1834/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1834/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/921/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/921/loser.png", "save_id": 1834, "prompt_en": "Erase the green English slogan “KEEP OUR PARK CLEAN – RECYCLE TODAY!” from the top banner.", "prompt_cn": "擦除顶部横幅上绿色的英文标语“KEEP OUR PARK CLEAN – RECYCLE TODAY!”文本", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 922, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/102.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/922/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1840/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1840/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/922/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/922/tie_2.png", "save_id": 1840, "prompt_en": "Add the text “FIRES” after “NO”, matching the font style and color of “NO”.", "prompt_cn": "在文字“NO”后方添加“FIRES”,字体风格和颜色与“NO”保持一致。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 923, 
"sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/102.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/923/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1840/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1840/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/923/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/923/loser.png", "save_id": 1840, "prompt_en": "Add the text “FIRES” after “NO”, matching the font style and color of “NO”.", "prompt_cn": "在文字“NO”后方添加“FIRES”,字体风格和颜色与“NO”保持一致。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 924, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/81.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/924/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1842/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1842/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/924/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/924/tie_2.png", "save_id": 1842, "prompt_en": "Insert a centered line of slightly smaller gold uppercase text beneath the title: “CRIME MYSTERY SERIES”.", "prompt_cn": "在标题下方居中插入一行稍小的金色大写字母“CRIME MYSTERY SERIES”", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 925, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/103.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/925/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1846/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1846/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/925/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/925/tie_2.png", "save_id": 1846, "prompt_en": "Add the title “Little Rabbit” at the top center of the image using colorful bubble letters arranged in an arch.", "prompt_cn": "在图片上方中央加入标题“Little Rabbit”,采用五颜六色的气泡字母,并呈拱形排布。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 926, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/86.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/926/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1850/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1850/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/926/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/926/tie_2.png", "save_id": 1850, "prompt_en": "Change the word 'Crash' to 'Boom'.", "prompt_cn": "将单词“Crash”改为“Boom”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 927, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/86.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/927/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1850/0.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1850/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/927/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/927/loser.png", "save_id": 1850, "prompt_en": "Change the word 'Crash' to 'Boom'.", "prompt_cn": "将单词“Crash”改为“Boom”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 928, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/89.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/928/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1853/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1853/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/928/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/928/loser.png", "save_id": 1853, "prompt_en": "Change '2023' to '2024'.", "prompt_cn": "将“2023”更改为“2026”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 929, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/92.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/929/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1856/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1856/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/929/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/929/loser.png", "save_id": 1856, 
"prompt_en": "Erase the black English text “Level 1” at the top of the parchment panel.", "prompt_cn": "擦除卷轴上顶部的“Level 1”黑色英文文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 930, "sampling_model": "Qwen-Image-Edit-R1", "source_image_ori": "Part3/Text_en/99.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/930/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1862/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit-R1/en/Part6/Visual_Text_EN/1862/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/930/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/930/tie_2.png", "save_id": 1862, "prompt_en": "Add a golden glowing English title 'DRAGON'S WATCH' in the top-left corner of the image.", "prompt_cn": "在图片的左上角添加金色发光英文标题\"DRAGON'S WATCH\"。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 931, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/931/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/773/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/773/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/931/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/931/loser.png", "save_id": 773, "prompt_en": "Change the sofa material to red velvet.", "prompt_cn": "将沙发的材质更改为红色天鹅绒。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 932, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/932/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/775/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/775/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/932/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/932/tie_2.png", "save_id": 775, "prompt_en": "Turn the dining chair into a wooden one.", "prompt_cn": "将这把餐椅变成一把木制餐椅。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 933, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/933/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/779/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/779/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/933/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/933/tie_2.png", "save_id": 779, "prompt_en": "Replace the curtains with blue ones made of cotton-linen.", "prompt_cn": "把窗帘换成蓝色的棉麻材质。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 934, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/15.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/934/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/785/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/785/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/934/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/934/tie_2.png", "save_id": 785, "prompt_en": "Change the bedside table to pine wood.", "prompt_cn": "将床头柜改成松木的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 935, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/935/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/792/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/792/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/935/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/935/tie_2.png", "save_id": 792, "prompt_en": "Change the jacket to leather.", "prompt_cn": "将外套改为皮质的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 936, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/936/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/792/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/792/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/936/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/936/loser.png", "save_id": 792, "prompt_en": "Change the jacket to leather.", "prompt_cn": "将外套改为皮质的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 937, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/32.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/937/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/796/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/796/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/937/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/937/tie_2.png", "save_id": 796, "prompt_en": "Change the basket to plastic.", "prompt_cn": "将篮子更改为塑料的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 938, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/44.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/938/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/806/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/806/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/938/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/938/loser.png", "save_id": 806, "prompt_en": "Make the cup paper.", "prompt_cn": "将这个杯子改成纸杯。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 939, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/48.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/939/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/809/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/809/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/939/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/939/loser.png", "save_id": 809, "prompt_en": "Change the jug to glass.", "prompt_cn": "将水壶变成玻璃水壶。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 940, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/51.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/940/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/812/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/812/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/940/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/940/loser.png", "save_id": 812, "prompt_en": "Change the elephant to jade.", "prompt_cn": "将这只大象修改为一只玉制大象。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 941, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/54.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/941/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/814/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/814/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/941/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/941/loser.png", "save_id": 814, "prompt_en": "Change the crane to metal foil.", "prompt_cn": "将这只纸鹤改成由金属锡纸制作的。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 942, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/942/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/818/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/818/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/942/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/942/tie_2.png", "save_id": 818, "prompt_en": "Change the floor to a wooden floor.", "prompt_cn": "将地板改为木质的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 943, "sampling_model": "Bagel", "source_image_ori": "Part2/Change_material/64.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/943/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/821/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Material/821/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/943/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/943/tie_2.png", "save_id": 821, "prompt_en": "Change the toy car to plastic.", "prompt_cn": "将玩具车改为塑料材质。", "label": "tie", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 944, "sampling_model": "Bagel", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/944/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/1/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/1/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/944/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/944/tie_2.png", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 945, "sampling_model": "Bagel", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/945/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/3/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/3/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/945/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/945/loser.png", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 946, "sampling_model": "Bagel", "source_image_ori": "Part1/image/2.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/946/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/3/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/3/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/946/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/946/loser.png", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 947, "sampling_model": "Bagel", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/947/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/4/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/4/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/947/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/947/loser.png", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 948, "sampling_model": "Bagel", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/948/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/4/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/4/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/948/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/948/loser.png", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 949, "sampling_model": "Bagel", "source_image_ori": "Part1/image/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/949/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/10/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/10/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/949/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/949/loser.png", "save_id": 10, "prompt_en": "Add a pink balloon to the panda’s hand on the far right.", "prompt_cn": "在最右边的熊猫手上加入一个粉色的气球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 950, "sampling_model": "Bagel", "source_image_ori": "Part1/image/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/950/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/10/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/10/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/950/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/950/loser.png", "save_id": 10, "prompt_en": "Add a pink balloon to the panda’s hand on the far right.", "prompt_cn": "在最右边的熊猫手上加入一个粉色的气球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 951, "sampling_model": "Bagel", "source_image_ori": "Part1/image/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/951/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/18/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/18/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/951/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/951/loser.png", "save_id": 18, "prompt_en": "Add a steaming ceramic mug of tea to the right of the book on the checkered tablecloth.", "prompt_cn": "在格子桌布上的书右边添加一个冒着热气的陶瓷茶杯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 952, "sampling_model": "Bagel", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/952/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/19/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/19/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/952/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/952/tie_2.png", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 953, "sampling_model": "Bagel", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/953/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/26/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/26/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/953/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/953/loser.png", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 954, "sampling_model": "Bagel", "source_image_ori": "Part1/image/26.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/954/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/27/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/27/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/954/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/954/loser.png", "save_id": 27, "prompt_en": "Add a seagull flying above the distant water on the left side of the boat", "prompt_cn": "在小船左侧远处的海面上添加一只海鸥", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 955, "sampling_model": "Bagel", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/955/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/29/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/29/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/955/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/955/tie_2.png", "save_id": 29, "prompt_en": "Add a picnic basket on 
the sand in front left of the table", "prompt_cn": "在桌子左前方的沙滩上添加一个小野餐篮", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 956, "sampling_model": "Bagel", "source_image_ori": "Part1/image/34.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/956/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/35/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/35/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/956/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/956/tie_2.png", "save_id": 35, "prompt_en": "Add a kitten on the stool next to the dressing table chair.", "prompt_cn": "在梳妆台椅子旁的凳子上加入一只小猫。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 957, "sampling_model": "Bagel", "source_image_ori": "Part1/image/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/957/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/37/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/37/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/957/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/957/tie_2.png", "save_id": 37, "prompt_en": "Add a Starbucks iced latte with the logo visible on the table.", "prompt_cn": "在桌子上加入一杯带标志的星巴克冰拿铁 。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 958, "sampling_model": "Bagel", 
"source_image_ori": "Part1/image/42.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/958/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/44/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/44/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/958/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/958/tie_2.png", "save_id": 44, "prompt_en": "Add a transparent glass baby bottle to the right of the stacked clothes.", "prompt_cn": "在叠好的衣服右方加入一只透明玻璃奶瓶", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 959, "sampling_model": "Bagel", "source_image_ori": "Part1/image/49.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/959/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/51/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/51/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/959/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/959/loser.png", "save_id": 51, "prompt_en": "Add a small silver metal trash bin in the bottom right corner.", "prompt_cn": "在右下角添加一个银色金属垃圾桶。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 960, "sampling_model": "Bagel", "source_image_ori": "Part1/image/49.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/960/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/51/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/51/8.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/960/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/960/tie_2.png", "save_id": 51, "prompt_en": "Add a small silver metal trash bin in the bottom right corner.", "prompt_cn": "在右下角添加一个银色金属垃圾桶。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 961, "sampling_model": "Bagel", "source_image_ori": "Part1/image/55.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/961/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/56/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/56/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/961/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/961/tie_2.png", "save_id": 56, "prompt_en": "Add a bag of chips to the left of the cat.", "prompt_cn": "在猫咪的左边加入一包薯片", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 962, "sampling_model": "Bagel", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/962/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/60/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/60/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/962/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/962/loser.png", "save_id": 60, "prompt_en": "Place a Los Angeles Lakers LeBron James jersey on the yellow lounge chair.", "prompt_cn": "在黄色躺椅上放一件湖人队勒布朗·詹姆斯的球衣。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 963, "sampling_model": "Bagel", "source_image_ori": "Part1/image/65.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/963/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/66/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/66/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/963/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/963/tie_2.png", "save_id": 66, "prompt_en": "Add a Santa Claus figurine to the bottom-right corner.", "prompt_cn": "在右下角加入一个圣诞老人人偶。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 964, "sampling_model": "Bagel", "source_image_ori": "Part1/image/68.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/964/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/68/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/68/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/964/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/964/loser.png", "save_id": 68, "prompt_en": "Add a pair of white Nike Air Force 1 sneakers on the desk.", "prompt_cn": "在桌子上加入一双 Nike Air Force 1 白色球鞋。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 965, "sampling_model": "Bagel", "source_image_ori": "Part1/image/83.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/965/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/71/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/71/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/965/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/965/loser.png", "save_id": 71, "prompt_en": "Place a small quadcopter drone in front of the red door.", "prompt_cn": "在红色的门前放置一架小型四轴无人机。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 966, "sampling_model": "Bagel", "source_image_ori": "Part1/image/104.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/966/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/75/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/75/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/966/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/966/tie_2.png", "save_id": 75, "prompt_en": "Add a blue-and-white WeChat QR code stand above the sponges.", "prompt_cn": "在海绵上方空白处添加一个蓝白相间的微信二维码立牌。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 967, "sampling_model": "Bagel", "source_image_ori": "Part1/image/120.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/967/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/77/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/77/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/967/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/967/tie_2.png", "save_id": 77, "prompt_en": "Add a Frozen movie poster on the wall 
on the left.", "prompt_cn": "在左边的墙上加入一个冰雪奇缘的海报。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 968, "sampling_model": "Bagel", "source_image_ori": "Part1/image/148.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/968/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/80/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/80/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/968/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/968/tie_2.png", "save_id": 80, "prompt_en": "Add a Minion to the road.", "prompt_cn": "在道路上加入一个小黄人。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 969, "sampling_model": "Bagel", "source_image_ori": "Part1/image/242.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/969/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/83/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/83/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/969/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/969/loser.png", "save_id": 83, "prompt_en": "Add a ping pong paddle on the ping pong table.", "prompt_cn": "在乒乓球桌上加入一个乒乓球拍。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 970, "sampling_model": "Bagel", "source_image_ori": "Part1/image/266.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/970/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/90/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/90/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/970/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/970/loser.png", "save_id": 90, "prompt_en": "Add a lone wind turbine in the middle distance on the right side of the field.", "prompt_cn": "在农田右侧中景位置添加一座孤立的风力发电机。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 971, "sampling_model": "Bagel", "source_image_ori": "Part1/image/267.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/971/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/91/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/91/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/971/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/971/loser.png", "save_id": 91, "prompt_en": "Add a blue cushion on the floor in front of the cabinet.", "prompt_cn": "在柜子前面的地板上添加一个蓝色坐垫。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 972, "sampling_model": "Bagel", "source_image_ori": "Part1/image/280.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/972/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/93/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/93/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/972/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/972/tie_2.png", "save_id": 93, "prompt_en": "Add a pink baby stroller to the bottom-right corner of the image.", "prompt_cn": "在图片的右下角加入一个粉色的婴儿车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 973, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/973/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/112/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/112/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/973/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/973/loser.png", "save_id": 112, "prompt_en": "Add another identical coffee mug next to it.", "prompt_cn": "在现有咖啡杯旁边再添加一个相同的咖啡杯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 974, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/974/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/113/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/113/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/974/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/974/loser.png", "save_id": 113, "prompt_en": "Place an identical armchair opposite the existing one.", "prompt_cn": "在现有的扶手椅对面放置一把一模一样的扶手椅。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 975, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/975/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/117/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/117/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/975/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/975/loser.png", "save_id": 117, "prompt_en": "Add another identical picnic basket on the opposite corner.", "prompt_cn": "在毯子对角的另一角落添加一个相同的野餐篮。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 976, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/976/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/119/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/119/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/976/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/976/loser.png", "save_id": 119, "prompt_en": "Add an identical burger next to the existing one.", "prompt_cn": "在现在的汉堡旁边加入一个和现在汉堡一模一样的汉堡。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 977, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/977/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/121/1.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/121/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/977/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/977/tie_2.png", "save_id": 121, "prompt_en": "Place a pillow identical to the one on the left side of the sofa in the right corner of the sofa.", "prompt_cn": "在沙发右侧角落放置一个和沙发左侧相同的抱枕。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 978, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/978/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/124/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/124/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/978/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/978/loser.png", "save_id": 124, "prompt_en": "Add another golden retriever sitting beside the first one.", "prompt_cn": "在第一只金毛猎犬旁边添加另一只坐着的金毛猎犬。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 979, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/979/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/124/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/124/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/979/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/979/loser.png", "save_id": 124, "prompt_en": "Add another golden retriever sitting beside
the first one.", "prompt_cn": "在第一只金毛猎犬旁边添加另一只坐着的金毛猎犬。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 980, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/980/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/126/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/126/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/980/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/980/loser.png", "save_id": 126, "prompt_en": "Place an identical candle holder on the right end of the shelf.", "prompt_cn": "在木板架的右端放置一个相同的烛台。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 981, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/17.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/981/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/128/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/128/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/981/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/981/tie_2.png", "save_id": 128, "prompt_en": "Build another identical snowman next to the first one.", "prompt_cn": "在第一个雪人旁边再堆一个相同的雪人。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 982, "sampling_model": "Bagel", 
"source_image_ori": "Part2/Add_Copy/17.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/982/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/128/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/128/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/982/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/982/loser.png", "save_id": 128, "prompt_en": "Build another identical snowman next to the first one.", "prompt_cn": "在第一个雪人旁边再堆一个相同的雪人。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 983, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/19.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/983/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/130/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/130/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/983/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/983/loser.png", "save_id": 130, "prompt_en": "Duplicate an identical Pikachu next to the existing one.", "prompt_cn": "在现在皮卡丘的旁边复制一个相同的皮卡丘。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 984, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/19.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/984/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/130/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/130/6.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/984/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/984/loser.png", "save_id": 130, "prompt_en": "Duplicate an identical Pikachu next to the existing one.", "prompt_cn": "在现在皮卡丘的旁边复制一个相同的皮卡丘。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 985, "sampling_model": "Bagel", "source_image_ori": "Part2/Add_Copy/20.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/985/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/131/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/ADD/131/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/985/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/985/tie_2.png", "save_id": 131, "prompt_en": "Copy an identical game console next to the current one", "prompt_cn": "在现在游戏机的旁边复制一个相同的游戏机。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 986, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/986/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/900/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/900/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/986/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/986/tie_2.png", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 987, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/5.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/987/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/901/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/901/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/987/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/987/loser.png", "save_id": 901, "prompt_en": "Make the boy look like he is talking on the phone.", "prompt_cn": "让男孩看起来好像正在打电话。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 988, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/988/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/902/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/902/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/988/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/988/loser.png", "save_id": 902, "prompt_en": "Make this Pikachu jump up to pick the fruit.", "prompt_cn": "让这只皮卡丘跳起来去摘水果。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 989, "sampling_model": "Bagel", "source_image_ori": 
"Part2/Action/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/989/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/904/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/904/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/989/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/989/loser.png", "save_id": 904, "prompt_en": "Have the dog place its paws on the computer and work hard.", "prompt_cn": "让这只狗把爪子放在电脑上,看起来在努力工作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 990, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/990/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/904/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/904/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/990/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/990/loser.png", "save_id": 904, "prompt_en": "Have the dog place its paws on the computer and work hard.", "prompt_cn": "让这只狗把爪子放在电脑上,看起来在努力工作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 991, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/991/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/907/3.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/907/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/991/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/991/loser.png", "save_id": 907, "prompt_en": "Have the girl grip the lat pulldown machine with her hands.", "prompt_cn": "让这个女孩用手握住高位下拉的器械。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 992, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/992/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/909/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/909/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/992/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/992/loser.png", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 993, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/993/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/910/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/910/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/993/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/993/loser.png", "save_id": 910, "prompt_en": 
"Change the girl’s pose to a horse stance.", "prompt_cn": "将女孩的姿势改为马步。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 994, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/994/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/912/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/912/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/994/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/994/loser.png", "save_id": 912, "prompt_en": "Make the girl open her eyes and sit up.", "prompt_cn": "让这个女孩睁开眼睛并坐起来。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 995, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/995/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/922/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/922/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/995/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/995/loser.png", "save_id": 922, "prompt_en": "Change the fist into an open palm.", "prompt_cn": "将握拳变为张开的手掌。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 996, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/996/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/922/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/922/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/996/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/996/loser.png", "save_id": 922, "prompt_en": "Change the fist into an open palm.", "prompt_cn": "将握拳变为张开的手掌。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 997, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/30.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/997/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/925/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/925/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/997/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/997/loser.png", "save_id": 925, "prompt_en": "Have the boy cross his legs.", "prompt_cn": "让这个男生跷起二郎腿。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 998, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/31.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/998/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/926/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/926/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/998/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/998/loser.png", "save_id": 926, "prompt_en": "Adjust the girl’s pose so that she is looking straight ahead.", "prompt_cn": "让这个女生向前看.", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 999, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/999/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/929/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/929/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/999/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/999/loser.png", "save_id": 929, "prompt_en": "Have the man wave goodbye to his friend.", "prompt_cn": "让这位男士挥手向朋友告别。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1000, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1000/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/929/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/929/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1000/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1000/loser.png", "save_id": 929, "prompt_en": "Have the man wave goodbye to his friend.", "prompt_cn": "让这位男士挥手向朋友告别。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1001, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/35.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1001/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/930/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/930/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1001/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1001/tie_2.png", "save_id": 930, "prompt_en": "Make the woman do a split on the mat.", "prompt_cn": "让这位女性在垫子上做一个劈叉动作。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1002, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/35.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1002/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/930/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/930/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1002/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1002/loser.png", "save_id": 930, "prompt_en": "Make the woman do a split on the mat.", "prompt_cn": "让这位女性在垫子上做一个劈叉动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1003, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/35.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1003/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/930/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/930/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1003/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1003/loser.png", "save_id": 930, "prompt_en": "Make the woman do a split on the mat.", "prompt_cn": "让这位女性在垫子上做一个劈叉动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1004, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/38.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1004/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/933/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/933/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1004/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1004/loser.png", "save_id": 933, "prompt_en": "Make the girl in the act of bending down to pick up the toy.", "prompt_cn": "让这个女孩正在弯腰捡地上的玩具。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1005, "sampling_model": "Bagel", 
"source_image_ori": "Part2/Action/45.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1005/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/940/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/940/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1005/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1005/loser.png", "save_id": 940, "prompt_en": "Have the dog lie down on the grass.", "prompt_cn": "让这只狗卧在草地上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1006, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/45.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1006/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/940/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/940/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1006/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1006/loser.png", "save_id": 940, "prompt_en": "Have the dog lie down on the grass.", "prompt_cn": "让这只狗卧在草地上。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1007, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/49.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1007/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/944/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/944/1.png", 
"winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1007/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1007/loser.png", "save_id": 944, "prompt_en": "Have the cat lie down on the lawn.", "prompt_cn": "让这只猫卧在草坪上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1008, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/49.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1008/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/944/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/944/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1008/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1008/loser.png", "save_id": 944, "prompt_en": "Have the cat lie down on the lawn.", "prompt_cn": "让这只猫卧在草坪上。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1009, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/54.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1009/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/951/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/951/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1009/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1009/tie_2.png", "save_id": 951, "prompt_en": "Have the girl spread her arms.", "prompt_cn": "让这个女生张开双臂。", "label": "tie", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1010, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/56.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1010/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/950/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/950/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1010/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1010/loser.png", "save_id": 950, "prompt_en": "Have the boy blow up the balloon in his hand using his mouth.", "prompt_cn": "让这个男孩用嘴吹起他手中的气球。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1011, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/65.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1011/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/960/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/960/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1011/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1011/loser.png", "save_id": 960, "prompt_en": "Have this boy kneeling on one knee.", "prompt_cn": "让这个男生单膝下跪", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 1012, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/70.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1012/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/965/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/965/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1012/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1012/loser.png", "save_id": 965, "prompt_en": "Have this boy take a starting position for a 100-meter sprint.", "prompt_cn": "让这个男生做出百米起跑的动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1013, "sampling_model": "Bagel", "source_image_ori": "Part1/image/76.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1013/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/827/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/827/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1013/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1013/loser.png", "save_id": 827, "prompt_en": "Keep the globe and all dolls unchanged, and change the background to a night sky with a clear Milky Way and nebulae.", "prompt_cn": "保持地球仪和所有玩偶不变,将背景改为夜晚星空,有清晰的银河和星云。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1014, "sampling_model": "Bagel", "source_image_ori": "Part1/image/105.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1014/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/833/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/833/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1014/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1014/tie_2.png", "save_id": 833, "prompt_en": "Keep the fruits and bowl unchanged, and change the background to an indoor kitchen countertop with cabinets.", "prompt_cn": "保持水果和碗不变,将背景改为室内厨房料理台和橱柜。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1015, "sampling_model": "Bagel", "source_image_ori": "Part1/image/156.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1015/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/841/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/841/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1015/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1015/loser.png", "save_id": 841, "prompt_en": "Keep the lighthouse unchanged, and change the background to a snowy Arctic landscape with icebergs.", "prompt_cn": "保持灯塔不变,将背景改为带有冰山的北极雪景。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1016, "sampling_model": "Bagel", "source_image_ori": "Part1/image/204.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1016/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/846/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/846/9.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1016/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1016/loser.png", "save_id": 846, "prompt_en": "Keep the girl unchanged and change the background to a desert with golden sand dunes covering the ground.", "prompt_cn": "保持女子不变,将背景改为沙漠,地面覆盖着金黄色的沙丘。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1017, "sampling_model": "Bagel", "source_image_ori": "Part1/image/265.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1017/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/848/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/848/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1017/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1017/loser.png", "save_id": 848, "prompt_en": " Change the background to a snowy landscape while keeping the rabbits and basket unchanged.", "prompt_cn": "将背景更换为下雪的冬季景色,同时保持兔子和篮子不变。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1018, "sampling_model": "Bagel", "source_image_ori": "Part1/image/385.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1018/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/856/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/856/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1018/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1018/loser.png", "save_id": 856, "prompt_en": "Change the 
dog’s background to a spring park with blooming cherry blossoms.", "prompt_cn": "将这只戴眼镜的狗移动到春季樱花盛开的公园中。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1019, "sampling_model": "Bagel", "source_image_ori": "Part1/image/390.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1019/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/860/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/860/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1019/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1019/loser.png", "save_id": 860, "prompt_en": "Place the stone house on a sunset beach overlooking the ocean.", "prompt_cn": "将这座石屋移到面朝大海的日落沙滩上。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1020, "sampling_model": "Bagel", "source_image_ori": "Part1/image/391.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1020/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/861/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/861/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1020/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1020/tie_2.png", "save_id": 861, "prompt_en": "Keep the mailbox, change the background to a busy city street corner with skyscrapers.", "prompt_cn": "保留邮箱主体,将背景换成繁忙城市街角的人行道和高楼", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1021, "sampling_model": "Bagel", "source_image_ori": "Part1/image/398.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1021/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/864/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/864/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1021/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1021/loser.png", "save_id": 864, "prompt_en": "Move the astronaut to a golden beach at sunset with gentle ocean waves.", "prompt_cn": "将宇航员移到日落时分的金色沙滩上", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1022, "sampling_model": "Bagel", "source_image_ori": "Part1/image/405.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1022/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/870/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/870/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1022/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1022/loser.png", "save_id": 870, "prompt_en": "Keep the beige sofa, white armchair, marble coffee table, and the items on it, the blue upholstered ottoman, and the side tables on both sides unchanged, and change the original indoor beige wall and wooden floor background in the image to a serene lakeside grassy outdoor setting.", "prompt_cn": "保持米色沙发、白色扶手椅、大理石咖啡桌及桌上物品、蓝色软垫脚凳、两侧的边桌不变,将图片中原有的室内米色墙壁和木地板背景更改为一个宁静的湖畔草地露天环境。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1023, "sampling_model": "Bagel", "source_image_ori": "Part1/image/405.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1023/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/870/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/870/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1023/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1023/loser.png", "save_id": 870, "prompt_en": "Keep the beige sofa, white armchair, marble coffee table, and the items on it, the blue upholstered ottoman, and the side tables on both sides unchanged, and change the original indoor beige wall and wooden floor background in the image to a serene lakeside grassy outdoor setting.", "prompt_cn": "保持米色沙发、白色扶手椅、大理石咖啡桌及桌上物品、蓝色软垫脚凳、两侧的边桌不变,将图片中原有的室内米色墙壁和木地板背景更改为一个宁静的湖畔草地露天环境。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1024, "sampling_model": "Bagel", "source_image_ori": "Part1/image/410.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1024/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/874/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/874/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1024/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1024/tie_2.png", "save_id": 874, "prompt_en": "Keep the open empty brown cardboard box unchanged, and change the rustic wooden floor and beige wall background in the image to a busy modern logistics warehouse with a concrete floor.", 
"prompt_cn": "保持打开的空棕色纸箱不变,将图片中质朴的木板地面和米色墙壁的背景更改为一个繁忙的现代化物流仓库的水泥地面上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1025, "sampling_model": "Bagel", "source_image_ori": "Part1/image/412.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1025/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/876/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/876/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1025/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1025/loser.png", "save_id": 876, "prompt_en": "Keep the skateboarding LEGO Stormtrooper and its gear unchanged, and change the shopping mall interior background in the image to a sunny beach.", "prompt_cn": "保持正在滑板的乐高冲锋队员及其装备不变,将图片中商场内部的背景更改为一个阳光明媚的海滩。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1026, "sampling_model": "Bagel", "source_image_ori": "Part1/image/415.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1026/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/879/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/879/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1026/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1026/tie_2.png", "save_id": 879, "prompt_en": "Keep the sanitation worker wearing a fluorescent yellow jacket and the tool-filled cleaning cart he is pushing unchanged, and change the cobblestone street and brick building background in the 
image to a lively open-air market.", "prompt_cn": "保持穿着荧光黄夹克的环卫工人和他推着的装满工具的清洁车不变,将图片中鹅卵石街道和砖砌建筑的背景更改为一个热闹的露天市场。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1027, "sampling_model": "Bagel", "source_image_ori": "Part1/image/421.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1027/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/883/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/883/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1027/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1027/tie_2.png", "save_id": 883, "prompt_en": "Keep the person on the horse unchanged, and change the background to a golden beach at sunset.", "prompt_cn": "保持骑马的人不变,将背景改为夕阳下的金色海滩岸边。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1028, "sampling_model": "Bagel", "source_image_ori": "Part1/image/434.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1028/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/890/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_Background/890/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1028/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1028/loser.png", "save_id": 890, "prompt_en": "Keep the dog unchanged, and change the background to a snowy forest with tall pine trees.", "prompt_cn": "保持小狗不变,将背景改为有高大松树的雪树林。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", 
"VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1029, "sampling_model": "Bagel", "source_image_ori": "Part1/image/137.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1029/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/556/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/556/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1029/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1029/loser.png", "save_id": 556, "prompt_en": "Make the stuffed bunny on the bed twice as large.", "prompt_cn": "将床上的毛绒兔子尺寸放大为原来的两倍。\n", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1030, "sampling_model": "Bagel", "source_image_ori": "Part1/image/142.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1030/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/558/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/558/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1030/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1030/loser.png", "save_id": 558, "prompt_en": "Change the color of the plant question-mark sculpture to bright yellow.", "prompt_cn": "将植物问号雕塑的颜色改为亮黄色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1031, "sampling_model": "Bagel", "source_image_ori": 
"Part1/image/155.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1031/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/559/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/559/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1031/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1031/loser.png", "save_id": 559, "prompt_en": "Change the armchair on the right to light green.", "prompt_cn": "将右边的扶手椅改为浅绿色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1032, "sampling_model": "Bagel", "source_image_ori": "Part1/image/178.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1032/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/564/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/564/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1032/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1032/loser.png", "save_id": 564, "prompt_en": "Resize the teacup on the right to be the same size as the one on the left.", "prompt_cn": "让右边的茶杯变得和左边的一样大。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1033, "sampling_model": "Bagel", "source_image_ori": "Part1/image/202.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1033/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/566/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/566/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1033/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1033/loser.png", "save_id": 566, "prompt_en": "Adjust the girl's height to 4/5 of her current height.", "prompt_cn": "将这个女生的身高调整为当前的4/5。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1034, "sampling_model": "Bagel", "source_image_ori": "Part1/image/206.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1034/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/567/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/567/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1034/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1034/loser.png", "save_id": 567, "prompt_en": "Change the height of the chair next to the desk to 2/3 of its current height.", "prompt_cn": "将图中桌子旁的椅子高度改为当前的2/3。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1035, "sampling_model": "Bagel", "source_image_ori": "Part1/image/251.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1035/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/574/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/574/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1035/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1035/loser.png", "save_id": 574, "prompt_en": "Change 
the toothbrush handle to a solid, vivid sapphire blue color.", "prompt_cn": "将牙刷刷柄改成纯正的宝石蓝色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1036, "sampling_model": "Bagel", "source_image_ori": "Part1/image/257.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1036/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/578/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/578/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1036/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1036/tie_2.png", "save_id": 578, "prompt_en": "Change the canvas bag in the image to light blue.", "prompt_cn": "将图中的帆布包改为淡蓝色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1037, "sampling_model": "Bagel", "source_image_ori": "Part1/image/264.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1037/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/585/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/585/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1037/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1037/loser.png", "save_id": 585, "prompt_en": "Turn the painter’s T‑shirt color to black.", "prompt_cn": "把画工的T恤颜色改成黑色。\n", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", 
"IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1038, "sampling_model": "Bagel", "source_image_ori": "Part1/image/267.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1038/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/587/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/587/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1038/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1038/loser.png", "save_id": 587, "prompt_en": "Change the color of the wall to a clean solid white.", "prompt_cn": "将墙面颜色改成纯净的白色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1039, "sampling_model": "Bagel", "source_image_ori": "Part1/image/267.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1039/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/587/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/587/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1039/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1039/loser.png", "save_id": 587, "prompt_en": "Change the color of the wall to a clean solid white.", "prompt_cn": "将墙面颜色改成纯净的白色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1040, "sampling_model": 
"Bagel", "source_image_ori": "Part1/image/270.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1040/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/589/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/589/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1040/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1040/loser.png", "save_id": 589, "prompt_en": "Change the bookshelf color from terracotta to matte black.", "prompt_cn": "把书架从砖红色改为哑光黑。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1041, "sampling_model": "Bagel", "source_image_ori": "Part1/image/274.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1041/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/591/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/591/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1041/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1041/loser.png", "save_id": 591, "prompt_en": "Scale down the robot on the left to be the same size as the cup next to it.", "prompt_cn": "将左边的机器人缩小到和旁边的杯子一样大。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1042, "sampling_model": "Bagel", "source_image_ori": "Part1/image/275.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1042/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/592/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/592/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1042/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1042/loser.png", "save_id": 592, "prompt_en": "Resize the rocket toy to match the height of the adjacent backpack.", "prompt_cn": "让这个火箭玩具缩小到和旁边的书包一样高。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1043, "sampling_model": "Bagel", "source_image_ori": "Part1/image/277.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1043/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/593/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/593/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1043/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1043/loser.png", "save_id": 593, "prompt_en": "Replace the red pillow with a light yellow one.", "prompt_cn": "将红色的靠枕变为淡黄色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1044, "sampling_model": "Bagel", "source_image_ori": "Part1/image/279.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1044/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/595/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/595/0.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1044/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1044/loser.png", "save_id": 595, "prompt_en": "Enlarge the plant in the image to be as tall as the door handle.", "prompt_cn": "将图中的植物长大到和门把手一样高。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1045, "sampling_model": "Bagel", "source_image_ori": "Part1/image/285.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1045/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/597/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/597/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1045/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1045/loser.png", "save_id": 597, "prompt_en": "Enlarge the wicker picnic basket on the motorcycle’s rear seat to about twice its original size.", "prompt_cn": "把摩托车后座上的藤编野餐篮尺寸增大到大约原来的两倍。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1046, "sampling_model": "Bagel", "source_image_ori": "Part1/image/441.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1046/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/613/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/613/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1046/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1046/loser.png", "save_id": 613, "prompt_en": "Enlarge the brush in the image to twice its original 
size.", "prompt_cn": "将图中的刷子的尺寸扩大一倍。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1047, "sampling_model": "Bagel", "source_image_ori": "Part1/image/441.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1047/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/613/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/613/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1047/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1047/loser.png", "save_id": 613, "prompt_en": "Enlarge the brush in the image to twice its original size.", "prompt_cn": "将图中的刷子的尺寸扩大一倍。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1048, "sampling_model": "Bagel", "source_image_ori": "Part1/image/491.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1048/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/615/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/615/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1048/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1048/loser.png", "save_id": 615, "prompt_en": "Resize the monitor to half of its original size.", "prompt_cn": "将显示器缩小为原来的一半大小。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 1049, "sampling_model": "Bagel", "source_image_ori": "Part1/image/201.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1049/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/300/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/300/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1049/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1049/loser.png", "save_id": 300, "prompt_en": "Remove the 'Stop' sign and the pink helmet, and add a skateboard on the road.", "prompt_cn": "移除‘Stop’的标志和粉色的头盔,在路上加入一个滑板。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1050, "sampling_model": "Bagel", "source_image_ori": "Part1/image/307.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1050/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/310/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/310/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1050/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1050/loser.png", "save_id": 310, "prompt_en": "Remove the top white book on the coffee table, replace the ceramic kettle with a globe, change the sofa to light yellow, and place a sports jacket on the sofa.", "prompt_cn": "移除茶几上最上面的那本白色书,将茶几上的陶瓷水壶替换为地球仪,将沙发改为淡黄色,并在沙发上放一件运动外套。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1051, "sampling_model": "Bagel", "source_image_ori": "Part1/image/312.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1051/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/313/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/313/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1051/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1051/loser.png", "save_id": 313, "prompt_en": "Make the boy look very sad, remove the water bottle from his backpack, and change the backpack to sky blue.", "prompt_cn": "让男孩的表情变得很难过,移除书包中的水杯,并将书包改为天蓝色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1052, "sampling_model": "Bagel", "source_image_ori": "Part1/image/313.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1052/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/314/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/314/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1052/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1052/loser.png", "save_id": 314, "prompt_en": "Add a small dog walking beside the surfer and change the surfboard color to bright yellow.", "prompt_cn": "在冲浪者旁边添加一只小狗,并将冲浪板颜色改为亮黄色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1053, "sampling_model": "Bagel", "source_image_ori": "Part1/image/315.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1053/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/315/7.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/315/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1053/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1053/loser.png", "save_id": 315, "prompt_en": "Remove the two red lounge chairs and add a pink swim ring in the pool.", "prompt_cn": "移除两张红色躺椅,在游泳池中加入一个粉色的游泳圈。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1054, "sampling_model": "Bagel", "source_image_ori": "Part1/image/324.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1054/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/319/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/319/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1054/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1054/loser.png", "save_id": 319, "prompt_en": "Change the pink sand in the hourglass to blue, remove the redwood clock on the right, change the watch strap to brown, and add an ink bottle in front of the watch.", "prompt_cn": "将沙漏中的粉色沙子改为蓝色,移除右侧的红木时钟,将表带改为咖啡色,并在手表前方加入一个墨水瓶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1055, "sampling_model": "Bagel", "source_image_ori": "Part1/image/325.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1055/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/320/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/320/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1055/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1055/loser.png", "save_id": 320, "prompt_en": "Add a line of text saying 'RELAX TIME' on the teddy bear's chest and change the color of the headphones it is wearing to orange.", "prompt_cn": "在泰迪熊胸前添加一行写着‘RELAX TIME’的文字,并将它戴着的耳机颜色改为橘黄色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1056, "sampling_model": "Bagel", "source_image_ori": "Part1/image/334.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1056/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/325/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/325/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1056/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1056/loser.png", "save_id": 325, "prompt_en": "Add a watch to the boy's right wrist, remove the book on the table and the spoon in the plate, and refill the coffee cup with coffee.", "prompt_cn": "将男孩的右手腕上戴上手表,移除桌子上的书和盘子中的勺子,将咖啡杯里的咖啡续满。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1057, "sampling_model": "Bagel", "source_image_ori": "Part1/image/334.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1057/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/325/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/325/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1057/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1057/loser.png", "save_id": 325, "prompt_en": "Add a watch to the boy's right 
wrist, remove the book on the table and the spoon in the plate, and refill the coffee cup with coffee.", "prompt_cn": "将男孩的右手腕上戴上手表,移除桌子上的书和盘子中的勺子,将咖啡杯里的咖啡续满。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1058, "sampling_model": "Bagel", "source_image_ori": "Part1/image/341.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1058/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/328/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/328/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1058/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1058/loser.png", "save_id": 328, "prompt_en": "Change the groom's suit color to white. Hang a large sign on the left side of the image with the text \"I love you\" in a neon light style.", "prompt_cn": "将新郎的西装颜色改为白色。在图像左侧悬挂一块尺寸很大的招牌,招牌上写着 “I love you”,字体采用霓虹灯风格。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1059, "sampling_model": "Bagel", "source_image_ori": "Part1/image/342.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1059/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/329/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/329/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1059/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1059/loser.png", "save_id": 329, "prompt_en": "Change the “Massage” text on the black sachet to “Relax”, recolor the bag to deep purple, and add a small lit candle beside 
it.", "prompt_cn": "将黑色香薰袋上的“Massage”文字改为“Relax”,把袋子颜色改成深紫色,并在袋子旁边添加一支点燃的小蜡烛", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1060, "sampling_model": "Bagel", "source_image_ori": "Part1/image/345.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1060/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/331/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/331/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1060/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1060/loser.png", "save_id": 331, "prompt_en": "Change the girl's T-shirt to yellow and have her display a very confused or puzzled expression and pose.", "prompt_cn": "将女孩的T恤改为黄色,并让她表现出非常困惑或不解的表情和动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1061, "sampling_model": "Bagel", "source_image_ori": "Part1/image/347.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1061/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/333/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/333/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1061/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1061/loser.png", "save_id": 333, "prompt_en": "Remove the star decoration in the top-right corner, change the blanket on the bed to red, add a wooden plaque on the window, and engrave 'Happy New Year' on the plaque.", "prompt_cn": "移除右上角的五角星装饰,将床上的毯子改为红色,在窗户上添加一个木质牌匾,并在牌匾上刻上‘Happy New Year’字样。", "label": 
"preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1062, "sampling_model": "Bagel", "source_image_ori": "Part1/image/363.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1062/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/345/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/345/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1062/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1062/loser.png", "save_id": 345, "prompt_en": "Swap the positions of the two pairs of shoes, remove the green plant from the image, add a shoebox to the right of the shoes, and change the cushion on the sofa to blue.", "prompt_cn": "交换两双鞋的位置,移除图中的绿植,在鞋的右边添加鞋盒,并将沙发上的抱枕改为蓝色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1063, "sampling_model": "Bagel", "source_image_ori": "Part1/image/374.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1063/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/356/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/356/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1063/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1063/loser.png", "save_id": 356, "prompt_en": "Replace the microphone in the girl's hand with an ice cream, change the suit color to emerald green, and add a potted plant next to the large speaker.", "prompt_cn": "将女生手中的麦克风换成冰淇淋,将西装颜色改为翠绿色,并在大音箱旁添加一盆盆栽。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1064, "sampling_model": "Bagel", "source_image_ori": "Part1/image/375.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1064/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/357/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/357/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1064/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1064/loser.png", "save_id": 357, "prompt_en": "Remove the long bread on the right, replace the bread on the left with a silver fork, and change the orange juice to milk.", "prompt_cn": "移除右侧的长面包,将左侧的面包换成银色的叉子,并将橙汁替换为牛奶。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1065, "sampling_model": "Bagel", "source_image_ori": "Part1/image/376.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1065/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/358/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/358/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1065/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1065/loser.png", "save_id": 358, "prompt_en": "Replace the globe in the image with a bucket of popcorn, remove the green plant, and change the black-and-white painting on the wall to color.", "prompt_cn": "将图中的地球仪换成一桶爆米花,移除图中的绿植,并将墙上的黑白画改为彩色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1066, "sampling_model": "Bagel", "source_image_ori": "Part1/image/377.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1066/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/359/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/359/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1066/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1066/loser.png", "save_id": 359, "prompt_en": "Replace the strawberries in the image with cherries, change the milk to watermelon juice, and remove the spoon from the image.", "prompt_cn": "将图中的草莓换成樱桃,将牛奶替换为西瓜汁,并移除图中的勺子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1067, "sampling_model": "Bagel", "source_image_ori": "Part1/image/441.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1067/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/364/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/364/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1067/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1067/loser.png", "save_id": 364, "prompt_en": "Remove the towel in the bottom left corner and add an electric toothbrush in the empty space on the right, turned on.", "prompt_cn": "删除左下角的毛巾,并在右侧空白处添加一支打开的电动牙刷。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1068, "sampling_model": "Bagel", "source_image_ori": "Part1/image/444.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1068/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/366/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/366/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1068/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1068/loser.png", "save_id": 366, "prompt_en": "Change the headphone case in the image to black and the headphones to sky blue.", "prompt_cn": "将图像中的耳机盒变为黑色,耳机变为天蓝色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1069, "sampling_model": "Bagel", "source_image_ori": "Part1/image/444.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1069/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/366/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/366/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1069/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1069/loser.png", "save_id": 366, "prompt_en": "Change the headphone case in the image to black and the headphones to sky blue.", "prompt_cn": "将图像中的耳机盒变为黑色,耳机变为天蓝色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1070, "sampling_model": "Bagel", "source_image_ori": "Part1/image/447.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1070/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/369/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/369/0.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1070/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1070/loser.png", "save_id": 369, "prompt_en": "Change the sink color to blue and replace the mirror with a round mirror.", "prompt_cn": "将洗手台颜色改为蓝色,并将镜子替换为圆形镜子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1071, "sampling_model": "Bagel", "source_image_ori": "Part1/image/451.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1071/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/374/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/374/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1071/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1071/loser.png", "save_id": 374, "prompt_en": "Have a person sit on the chair reading a book, replace the vase with a table lamp, and add a kitten on the stairs.", "prompt_cn": "让一个人坐在椅子上看书,把花瓶换成台灯,并在楼梯上加入一只小猫。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1072, "sampling_model": "Bagel", "source_image_ori": "Part1/image/460.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1072/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/380/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/380/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1072/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1072/loser.png", "save_id": 380, "prompt_en": "Remove the laptop from the person and the plant beside 
them.", "prompt_cn": "移除人物身上的电脑,和旁边的植物。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1073, "sampling_model": "Bagel", "source_image_ori": "Part1/image/461.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1073/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/381/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/381/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1073/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1073/loser.png", "save_id": 381, "prompt_en": "Change the green paint on the wall to pink, add the text 'Love Home', and replace the girl's pants with shorts.", "prompt_cn": "将墙上的绿色油漆变成粉色,并写上文字‘Love Home’,将女生的裤子换成短裤。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1074, "sampling_model": "Bagel", "source_image_ori": "Part1/image/469.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1074/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/385/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/385/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1074/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1074/loser.png", "save_id": 385, "prompt_en": "Replace the lantern with a fire extinguisher, change the carpet to brown, add the text 'Happy Holidays' above the fireplace, and remove the bamboo basket.", "prompt_cn": "将灯笼替换为灭火器,将地毯改为棕色,在壁炉上方添加文字‘Happy Holidays’,并移除竹篮。", "label": "preference", "dimension": "IF", "system_prompt_name": { 
"IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1075, "sampling_model": "Bagel", "source_image_ori": "Part1/image/474.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1075/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/388/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/388/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1075/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1075/loser.png", "save_id": 388, "prompt_en": "Place a sign reading 'Dream House' in the center of the lawn and add a cardboard box to the bottom-right corner.", "prompt_cn": "在草坪中间竖立一块写着‘Dream House’的牌子,并在右下角添加一个纸箱。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1076, "sampling_model": "Bagel", "source_image_ori": "Part1/image/480.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1076/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/393/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/393/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1076/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1076/loser.png", "save_id": 393, "prompt_en": "Change the girl's top to pink, add a sports backpack to the left of the chair, and place a badminton racket on the right side of the chair.", "prompt_cn": "将女孩的上衣改为粉色,在椅子左侧添加一个运动书包,在椅子右侧添加一支羽毛球拍。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1077, "sampling_model": "Bagel", "source_image_ori": "Part1/image/480.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1077/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/393/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/393/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1077/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1077/loser.png", "save_id": 393, "prompt_en": "Change the girl's top to pink, add a sports backpack to the left of the chair, and place a badminton racket on the right side of the chair.", "prompt_cn": "将女孩的上衣改为粉色,在椅子左侧添加一个运动书包,在椅子右侧添加一支羽毛球拍。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1078, "sampling_model": "Bagel", "source_image_ori": "Part1/image/484.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1078/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/395/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/395/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1078/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1078/loser.png", "save_id": 395, "prompt_en": "Change the safety helmet to yellow and remove the gloves from the man's hands.", "prompt_cn": "将安全帽改为黄色,并移除男人手上的手套。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1079, "sampling_model": "Bagel", "source_image_ori": "Part1/image/485.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1079/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/396/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/396/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1079/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1079/loser.png", "save_id": 396, "prompt_en": "Remove the blood pressure monitor from the image and add a Hello Kitty doll on the table.", "prompt_cn": "移除图中的血压计,并在桌子上添加一个凯蒂猫玩偶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1080, "sampling_model": "Bagel", "source_image_ori": "Part1/image/494.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1080/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/403/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/403/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1080/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1080/loser.png", "save_id": 403, "prompt_en": "Remove the laptop from the desk and add a marble coffee table in the center of the image.", "prompt_cn": "移除书桌上的笔记本电脑,并在图像中央添加一个大理石茶几。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1081, "sampling_model": "Bagel", "source_image_ori": "Part1/image/494.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1081/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/403/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/403/11.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1081/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1081/tie_2.png", "save_id": 403, "prompt_en": "Remove the laptop from the desk and add a marble coffee table in the center of the image.", "prompt_cn": "移除书桌上的笔记本电脑,并在图像中央添加一个大理石茶几。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1082, "sampling_model": "Bagel", "source_image_ori": "Part1/image/497.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1082/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/405/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/405/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1082/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1082/loser.png", "save_id": 405, "prompt_en": "Remove the white car on the right side of the road and add an ambulance in the middle of the road.", "prompt_cn": "移除路右边的白色车,并在路中间添加一辆急救车。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1083, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1083/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/413/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/413/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1083/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1083/loser.png", "save_id": 413, "prompt_en": "Have the girl open her eyes and sit up, showing an angry expression, 
and remove the bedside lamp.", "prompt_cn": "让这个女孩睁开眼睛并坐起来,表现出愤怒的表情,移除床边的台灯。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1084, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1084/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/413/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/413/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1084/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1084/loser.png", "save_id": 413, "prompt_en": "Have the girl open her eyes and sit up, showing an angry expression, and remove the bedside lamp.", "prompt_cn": "让这个女孩睁开眼睛并坐起来,表现出愤怒的表情,移除床边的台灯。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1085, "sampling_model": "Bagel", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1085/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/692/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/692/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1085/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1085/loser.png", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1086, "sampling_model": "Bagel", "source_image_ori": "Part1/image/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1086/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/693/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/693/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1086/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1086/loser.png", "save_id": 693, "prompt_en": "Extract the robot figure on the left, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出左边的机器人公仔,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1087, "sampling_model": "Bagel", "source_image_ori": "Part1/image/47.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1087/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/696/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/696/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1087/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1087/loser.png", "save_id": 696, "prompt_en": "Extract the hanging clock on the side of the building, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取建筑物侧面的挂钟,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF":
"IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1088, "sampling_model": "Bagel", "source_image_ori": "Part1/image/84.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1088/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/701/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/701/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1088/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1088/loser.png", "save_id": 701, "prompt_en": "Extract the yellow motorcycle from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出黄色的摩托车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1089, "sampling_model": "Bagel", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1089/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/702/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/702/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1089/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1089/loser.png", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1090, 
"sampling_model": "Bagel", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1090/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/702/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/702/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1090/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1090/loser.png", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1091, "sampling_model": "Bagel", "source_image_ori": "Part1/image/88.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1091/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/703/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/703/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1091/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1091/loser.png", "save_id": 703, "prompt_en": "Extract the black 8-ball, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出那颗黑色的8号台球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1092, "sampling_model": "Bagel", "source_image_ori": "Part1/image/89.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1092/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/704/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/704/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1092/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1092/loser.png", "save_id": 704, "prompt_en": "Extract the white mug on the right, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出右侧的这个白色马克杯,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1093, "sampling_model": "Bagel", "source_image_ori": "Part1/image/100.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1093/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/707/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/707/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1093/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1093/loser.png", "save_id": 707, "prompt_en": "Extract the larger pigeon, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出较大的鸽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1094, "sampling_model": "Bagel", "source_image_ori": "Part1/image/107.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1094/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/710/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/710/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1094/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1094/loser.png", "save_id": 710, "prompt_en": "Extract the child and the small dog they are walking, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的小孩以及牵着的小狗,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1095, "sampling_model": "Bagel", "source_image_ori": "Part1/image/153.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1095/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/717/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/717/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1095/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1095/loser.png", "save_id": 717, "prompt_en": "Extract the microphone from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的麦克风,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1096, "sampling_model": "Bagel", "source_image_ori": "Part1/image/169.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1096/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/723/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/723/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1096/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1096/loser.png", "save_id": 723, "prompt_en": "Extract the snowman toy from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的雪人玩偶,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1097, "sampling_model": "Bagel", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1097/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/724/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/724/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1097/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1097/loser.png", "save_id": 724, "prompt_en": "Extract the blue jumpsuit from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的蓝色连体衣,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1098, "sampling_model": "Bagel", "source_image_ori": "Part1/image/190.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1098/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/734/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/734/10.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1098/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1098/loser.png", "save_id": 734, "prompt_en": "Extract the paraglider and the pilot from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的滑翔伞和飞行员,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1099, "sampling_model": "Bagel", "source_image_ori": "Part1/image/321.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1099/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/739/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/739/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1099/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1099/loser.png", "save_id": 739, "prompt_en": "Extract the tennis racket from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的网球拍,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1100, "sampling_model": "Bagel", "source_image_ori": "Part1/image/323.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1100/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/740/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/740/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1100/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1100/loser.png", "save_id": 740, "prompt_en": "Extract the baby lion from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的小狮子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1101, "sampling_model": "Bagel", "source_image_ori": "Part1/image/323.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1101/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/740/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/740/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1101/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1101/loser.png", "save_id": 740, "prompt_en": "Extract the baby lion from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的小狮子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1102, "sampling_model": "Bagel", "source_image_ori": "Part1/image/355.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1102/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/748/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/748/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1102/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1102/loser.png", "save_id": 748, "prompt_en": "Extract the butterfly from the 
image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的蝴蝶,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1103, "sampling_model": "Bagel", "source_image_ori": "Part1/image/409.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1103/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/751/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/751/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1103/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1103/loser.png", "save_id": 751, "prompt_en": "Extract the Snoopy figurine wearing a graduation cap and the “Class of 2026” sign from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中戴学士帽的史努比公仔及“Class of 2026”牌子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1104, "sampling_model": "Bagel", "source_image_ori": "Part1/image/450.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1104/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/757/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/757/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1104/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1104/loser.png", "save_id": 757, "prompt_en": "Extract the dressing table from the image, keeping its position, 
orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的梳妆台,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1105, "sampling_model": "Bagel", "source_image_ori": "Part1/image/458.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1105/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/758/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/758/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1105/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1105/tie_2.png", "save_id": 758, "prompt_en": "Extract the car from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的这辆车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1106, "sampling_model": "Bagel", "source_image_ori": "Part1/image/463.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1106/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/759/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/759/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1106/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1106/loser.png", "save_id": 759, "prompt_en": "Extract the two dogs from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的两只狗,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", 
"label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1107, "sampling_model": "Bagel", "source_image_ori": "Part1/image/466.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1107/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/761/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/761/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1107/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1107/loser.png", "save_id": 761, "prompt_en": "Extract the dark blue notebook on the right side of the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像右侧的深蓝色笔记本,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1108, "sampling_model": "Bagel", "source_image_ori": "Part1/image/466.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1108/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/761/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/761/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1108/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1108/loser.png", "save_id": 761, "prompt_en": "Extract the dark blue notebook on the right side of the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像右侧的深蓝色笔记本,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { 
"IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1109, "sampling_model": "Bagel", "source_image_ori": "Part1/image/468.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1109/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/762/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/762/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1109/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1109/loser.png", "save_id": 762, "prompt_en": "Extract the white boat from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的那艘白船,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1110, "sampling_model": "Bagel", "source_image_ori": "Part1/image/489.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1110/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/768/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Extract/768/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1110/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1110/loser.png", "save_id": 768, "prompt_en": "Extract the woman on the left side of the image, including the megaphone, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的左侧女性(包括扩音器),保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1111, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1111/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/973/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/973/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1111/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1111/loser.png", "save_id": 973, "prompt_en": "Move the coffee cup to the bottom-right corner of the image.", "prompt_cn": "将咖啡杯移动到右下角。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1112, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1112/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/980/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/980/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1112/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1112/loser.png", "save_id": 980, "prompt_en": "Move the painting to the right.", "prompt_cn": "将这幅画向右移动。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1113, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/15.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1113/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/980/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/980/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1113/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1113/loser.png", "save_id": 980, "prompt_en": "Move the painting to the right.", "prompt_cn": "将这幅画向右移动。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1114, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/65.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1114/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1015/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1015/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1114/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1114/loser.png", "save_id": 1015, "prompt_en": "Move the sticky note to the bottom bezel of the monitor.", "prompt_cn": "将便利贴移动到显示器底部边框处。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1115, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/93.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1115/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1028/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1028/4.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1115/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1115/loser.png", "save_id": 1028, "prompt_en": "Move the red cube to the bottom left corner.", "prompt_cn": "将红色立方体移动到左下角。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1116, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1116/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1029/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1029/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1116/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1116/loser.png", "save_id": 1029, "prompt_en": "Move the blue sphere to the right of the yellow cylinder.", "prompt_cn": "将蓝色球体移动到黄色圆柱的右边。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1117, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1117/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1029/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1029/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1117/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1117/loser.png", "save_id": 1029, "prompt_en": "Move the blue sphere to the right of the yellow cylinder.", "prompt_cn": 
"将蓝色球体移动到黄色圆柱的右边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1118, "sampling_model": "Bagel", "source_image_ori": "Part1/image/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1118/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/133/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/133/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1118/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1118/loser.png", "save_id": 133, "prompt_en": "Remove the rainbow from the sky.", "prompt_cn": "移除天空中的彩虹。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1119, "sampling_model": "Bagel", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1119/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/136/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/136/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1119/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1119/loser.png", "save_id": 136, "prompt_en": "Remove the magnifying glass from the image.", "prompt_cn": "移除图中的放大镜 ", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1120, "sampling_model": "Bagel", "source_image_ori": 
"Part1/image/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1120/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/138/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/138/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1120/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1120/loser.png", "save_id": 138, "prompt_en": "Remove the robot figure on the right.", "prompt_cn": "移除右边的机器人公仔", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1121, "sampling_model": "Bagel", "source_image_ori": "Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1121/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/140/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/140/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1121/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1121/loser.png", "save_id": 140, "prompt_en": "Remove the heart from the image.", "prompt_cn": "移除图像中的爱心。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1122, "sampling_model": "Bagel", "source_image_ori": "Part1/image/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1122/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/142/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/142/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1122/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1122/loser.png", "save_id": 142, "prompt_en": "Remove the umbrella in the air.", "prompt_cn": "移除空中的雨伞。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1123, "sampling_model": "Bagel", "source_image_ori": "Part1/image/257.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1123/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/157/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/157/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1123/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1123/loser.png", "save_id": 157, "prompt_en": "Remove the camera from the image.", "prompt_cn": "移除图中的相机。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1124, "sampling_model": "Bagel", "source_image_ori": "Part1/image/261.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1124/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/158/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/158/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1124/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1124/loser.png", "save_id": 158, "prompt_en": "Remove all the popcorn pieces that are scattered on the table.", "prompt_cn": "移除桌面上所有散落的爆米花粒。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1125, "sampling_model": "Bagel", "source_image_ori": "Part1/image/277.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1125/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/166/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/166/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1125/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1125/loser.png", "save_id": 166, "prompt_en": "Remove any candles from the composition.", "prompt_cn": "移除图中的蜡烛。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1126, "sampling_model": "Bagel", "source_image_ori": "Part1/image/416.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1126/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/177/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/177/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1126/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1126/loser.png", "save_id": 177, "prompt_en": "Remove the largest rubber duck from the image.", "prompt_cn": "移除图中的最大的橡皮鸭。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1127, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1127/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/185/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/185/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1127/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1127/loser.png", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1128, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_attribute/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1128/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/191/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/191/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1128/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1128/loser.png", "save_id": 191, "prompt_en": "Remove all meat products.", "prompt_cn": "移除所有肉类食品。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1129, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1129/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/196/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/196/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1129/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1129/loser.png", "save_id": 196, "prompt_en": "Remove the armchair on the left side.", "prompt_cn": "删除图像左侧的扶手椅。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1130, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1130/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/199/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/199/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1130/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1130/loser.png", "save_id": 199, "prompt_en": "Remove the top book from the stack on the right side of the image.", "prompt_cn": "移除右边最上面的那本书。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1131, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1131/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/199/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/199/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1131/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1131/loser.png", "save_id": 199, "prompt_en": "Remove the top book from the stack on the right side of the image.", "prompt_cn": "移除右边最上面的那本书。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1132, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/8.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1132/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/202/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/202/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1132/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1132/loser.png", "save_id": 202, "prompt_en": "Remove the pillow on the right.", "prompt_cn": "移除右侧的枕头。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1133, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1133/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/202/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/202/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1133/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1133/loser.png", "save_id": 202, "prompt_en": "Remove the pillow on the right.", "prompt_cn": "移除右侧的枕头。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1134, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/12.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1134/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/206/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/206/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1134/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1134/loser.png", "save_id": 
206, "prompt_en": "Remove the teddy bear on the left.", "prompt_cn": "移除左侧的泰迪熊。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1135, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1135/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/207/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/207/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1135/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1135/loser.png", "save_id": 207, "prompt_en": "Remove the coffee mug next to the potted plant.", "prompt_cn": "移除放在盆栽旁边的咖啡杯。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1136, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1136/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/208/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/208/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1136/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1136/loser.png", "save_id": 208, "prompt_en": "Remove the apple next to the banana.", "prompt_cn": "移除在香蕉旁边的那个苹果。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1137, 
"sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/23.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1137/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/217/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/217/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1137/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1137/loser.png", "save_id": 217, "prompt_en": "Remove the USB drive that is nearest to the mouse.", "prompt_cn": "移除离鼠标最近的USB闪存盘。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1138, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/23.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1138/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/217/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/217/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1138/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1138/loser.png", "save_id": 217, "prompt_en": "Remove the USB drive that is nearest to the mouse.", "prompt_cn": "移除离鼠标最近的USB闪存盘。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1139, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/25.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1139/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/219/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/219/9.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1139/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1139/loser.png", "save_id": 219, "prompt_en": "Remove the clay flower pot closest to the shovel.", "prompt_cn": "移除离铲子最近的陶土花盆。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1140, "sampling_model": "Bagel", "source_image_ori": "Part2/Remove_spatial/30.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1140/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/224/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/224/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1140/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1140/loser.png", "save_id": 224, "prompt_en": "Remove the helmet closest to the bicycle.", "prompt_cn": "把离自行车最近的头盔移除。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1141, "sampling_model": "Bagel", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1141/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/141/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/141/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1141/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1141/loser.png", "save_id": 141, "prompt_en": "Remove the small beige pillow on the right side of the table.", "prompt_cn": "移除桌子右侧的米白色小抱枕。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1142, "sampling_model": "Bagel", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1142/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/143/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Remove/143/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1142/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1142/loser.png", "save_id": 143, "prompt_en": "Remove the lamp on the right bedside table.", "prompt_cn": "移除右侧床头柜的台灯。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1143, "sampling_model": "Bagel", "source_image_ori": "Part1/image/99.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1143/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/233/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/233/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1143/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1143/tie_2.png", "save_id": 233, "prompt_en": "Replace the wooden bench with a bicycle.", "prompt_cn": "将木长椅替换为一辆自行车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1144, "sampling_model": "Bagel", "source_image_ori": "Part1/image/134.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1144/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/247/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/247/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1144/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1144/loser.png", "save_id": 247, "prompt_en": "Turn the cilantro sprig in the upper right of the plate into a small edible purple orchid.", "prompt_cn": "把盘子右上角的香菜叶变成一朵小巧的可食用紫色兰花", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1145, "sampling_model": "Bagel", "source_image_ori": "Part1/image/137.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1145/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/249/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/249/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1145/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1145/loser.png", "save_id": 249, "prompt_en": "Replace the stuffed bunny on the bed with a large, yellow rubber duck wearing sunglasses.", "prompt_cn": "将床上的毛绒兔子替换为一个戴着太阳镜的、大号黄色橡皮鸭。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1146, "sampling_model": "Bagel", "source_image_ori": "Part1/image/160.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1146/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/256/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/256/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1146/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1146/loser.png", "save_id": 256, "prompt_en": "Replace the gold pillow on the sofa with a white cushion.", "prompt_cn": "把沙发上的金色枕头替换成白色靠垫。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1147, "sampling_model": "Bagel", "source_image_ori": "Part1/image/193.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1147/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/262/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/262/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1147/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1147/tie_2.png", "save_id": 262, "prompt_en": "Replace the middle chick toy with a bear plush.", "prompt_cn": "把中间的小鸭玩偶换成一只熊玩偶。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1148, "sampling_model": "Bagel", "source_image_ori": "Part1/image/194.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1148/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/263/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/263/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1148/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1148/tie_2.png", "save_id": 263, "prompt_en": "Replace the foreground boat with a kayak.", "prompt_cn": "把前景的小船换成一艘皮划艇。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1149, "sampling_model": "Bagel", "source_image_ori": "Part1/image/260.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1149/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/265/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/265/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1149/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1149/loser.png", "save_id": 265, "prompt_en": "Replace the emoji face with a realistic small football.", "prompt_cn": "将中央的表情圆形替换成一个真实质感的小足球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1150, "sampling_model": "Bagel", "source_image_ori": "Part1/image/312.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1150/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/270/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/270/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1150/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1150/loser.png", "save_id": 270, "prompt_en": "Change the boy's jacket to a beige trench coat.", "prompt_cn": "将男生的外套换为一件米黄色的风衣。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1151, "sampling_model": "Bagel", "source_image_ori": "Part1/image/430.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1151/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/279/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/279/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1151/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1151/loser.png", "save_id": 279, "prompt_en": "Replace the left circular window with a square window.", "prompt_cn": "将左侧圆形窗户替换为一个方形窗户。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1152, "sampling_model": "Bagel", "source_image_ori": "Part1/image/439.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1152/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/282/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/282/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1152/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1152/tie_2.png", "save_id": 282, "prompt_en": "Change the boy’s top to a short-sleeved shirt.", "prompt_cn": "将男生的上衣换为一件短袖", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1153, "sampling_model": "Bagel", "source_image_ori": "Part1/image/456.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1153/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/292/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Replace/292/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1153/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1153/tie_2.png", "save_id": 292, "prompt_en": "Replace the boy holding the barbell in the image with a girl.", "prompt_cn": "将图片中握着杠铃的这个男孩换成一个女孩。", 
"label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1154, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1154/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1040/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1040/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1154/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1154/loser.png", "save_id": 1040, "prompt_en": "Swap the positions of the coffee cup and the vase.", "prompt_cn": "将咖啡杯和花瓶的位置互相对调。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1155, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1155/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1041/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1041/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1155/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1155/loser.png", "save_id": 1041, "prompt_en": "Swap the positions of the two people.", "prompt_cn": "交换这两个人的位置。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1156, "sampling_model": 
"Bagel", "source_image_ori": "Part2/Swap/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1156/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1044/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1044/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1156/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1156/loser.png", "save_id": 1044, "prompt_en": "Swap the positions of the red apple and the green pear.", "prompt_cn": "交换红色苹果和绿色梨的位置。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1157, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/58.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1157/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1087/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1087/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1157/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1157/loser.png", "save_id": 1087, "prompt_en": "Swap the states of the liquids in the two cups.", "prompt_cn": "交换两件衣服的颜色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1158, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/75.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1158/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1103/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1103/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1158/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1158/loser.png", "save_id": 1103, "prompt_en": "Swap the poses and states of the two cats.", "prompt_cn": "交换两个猫的动作和状态。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1159, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1159/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1600/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1600/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1159/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1159/loser.png", "save_id": 1600, "prompt_en": "Delete the large pink-bordered Chinese text “2月14日 情人节上映” at the bottom center.", "prompt_cn": "删除画面底部中央粉色描边的中文“2月14日 情人节上映”文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1160, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1160/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1608/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1608/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1160/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1160/loser.png", "save_id": 
1608, "prompt_en": "Remove the brown Chinese title text “暑期档合家欢” from the central white banner.", "prompt_cn": "移除中央白色横幅上棕色的“暑期档合家欢”中文标题文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1161, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1161/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1617/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1617/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1161/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1161/loser.png", "save_id": 1617, "prompt_en": "Erase the Chinese text “孤独是最长的旅程” at the center of the image.", "prompt_cn": "擦除画面中央的中文文字“孤独是最长的旅程”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1162, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1162/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1628/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1628/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1162/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1162/loser.png", "save_id": 1628, "prompt_en": "Insert bright orange sans‑serif Chinese text “立即加入” centered above the two athletes.", "prompt_cn": "在两人上方靠中间位置插入橙色无衬线中文“立即加入”", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", 
"VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1163, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1163/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1632/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1632/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1163/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1163/loser.png", "save_id": 1632, "prompt_en": "Erase the black Chinese text “立即免费试听” from the central yellow button.", "prompt_cn": "擦除中间黄色按钮上的黑色中文“立即免费试听”字样", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1164, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1164/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1633/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1633/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1164/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1164/loser.png", "save_id": 1633, "prompt_en": "Change '立即免费试听' to '限时拼团报名'", "prompt_cn": "将文字“立即免费试听”修改为“限时拼团报名”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1165, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/28.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1165/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1651/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1651/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1165/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1165/loser.png", "save_id": 1651, "prompt_en": "Erase the large vertical calligraphic Chinese characters in the center.", "prompt_cn": "擦除中间竖排的大号书法“王羲之流”汉字。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1166, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/32.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1166/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1657/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1657/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1166/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1166/loser.png", "save_id": 1657, "prompt_en": "Change the warning text to '遵守交规,平安出行'", "prompt_cn": "将警示文字更改为“遵守交规,平安出行”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1167, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_cn/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1167/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1663/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_CN/1663/6.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1167/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1167/loser.png", "save_id": 1663, "prompt_en": "Remove the large black Chinese slogan “不听、不信、不转账” from the center of the poster.", "prompt_cn": "移除海报中间大号黑色中文“不听、不信、不转账”字样", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1168, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1168/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1739/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1739/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1168/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1168/loser.png", "save_id": 1739, "prompt_en": "Erase the blood-red English title text “THE GHOST” from the upper center of the image.", "prompt_cn": "擦除画面上方中央血红色的英文标题“THE GHOST”文字", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1169, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1169/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1740/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1740/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1169/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1169/loser.png", "save_id": 1740, "prompt_en": "Change the title 'THE GHOST' to 'THE ALIEN'.", 
"prompt_cn": "将标题文字从“THE GHOST”更改为“THE ALIEN”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1170, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1170/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1746/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1746/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1170/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1170/loser.png", "save_id": 1746, "prompt_en": "Change the title 'OCEAN' to 'DESERT'.", "prompt_cn": "将标题“OCEAN”更改为“DESERT”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1171, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1171/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1750/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1750/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1171/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1171/loser.png", "save_id": 1750, "prompt_en": "Remove the large yellow English title text “THE GRIP OF TERROR!” in the lower right.", "prompt_cn": "删除右下角大号黄色英文标题“THE GRIP OF TERROR!”文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1172, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1172/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1756/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1756/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1172/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1172/loser.png", "save_id": 1756, "prompt_en": "Add the light brown English phrase 'Music for the soul' in the lower-left corner.", "prompt_cn": "在左下角添加浅棕色英文短句“Music for the soul”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1173, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1173/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1759/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1759/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1173/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1173/loser.png", "save_id": 1759, "prompt_en": "Change 'East Coast' to 'West Coast'.", "prompt_cn": "将文字“East Coast”修改为“West Coast”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1174, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1174/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1764/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1764/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1174/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1174/tie_2.png", "save_id": 1764, "prompt_en": "Add light-blue, handwritten-style text 'Beach' to the left of the sunglasses on the sand at the bottom of the image.", "prompt_cn": "在画面下方沙滩上太阳镜的左侧,添加浅蓝色手写风格文字‘Beach’。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1175, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1175/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1766/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1766/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1175/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1175/loser.png", "save_id": 1766, "prompt_en": "Change the word 'FRIDAY' to 'WEEKEND'.", "prompt_cn": "将单词“FRIDAY”更改为“WEEKEND”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1176, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/26.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1176/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1772/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1772/7.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1176/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1176/loser.png", "save_id": 1772, "prompt_en": "Erase the large black English words “BUY 1 GET 1” on the yellow sign.", "prompt_cn": "擦除黄色牌子上巨大的黑色英文“BUY 1 GET 1”字样", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1177, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/29.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1177/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1776/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1776/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1177/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1177/loser.png", "save_id": 1776, "prompt_en": "Change 'Spring Look' to 'Autumn Look'.", "prompt_cn": "将文字“Spring Look”更改为“Autumn Look”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1178, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1178/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1777/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1777/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1178/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1178/loser.png", "save_id": 1777, "prompt_en": "Remove the red price '$1.99/lb' on the left poster.", "prompt_cn": 
"清除左边海报上红色的价格“$1.99/lb”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1179, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1179/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1785/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1785/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1179/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1179/loser.png", "save_id": 1785, "prompt_en": "Change the word 'Mars' to 'Moon'.", "prompt_cn": "将单词“Mars”修改为“ Moon”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1180, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/39.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1180/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1789/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1789/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1180/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1180/loser.png", "save_id": 1789, "prompt_en": "Erase the large red English title text “STOP THE HACKERS” at the top of the poster.", "prompt_cn": "擦除画面顶部巨大的红色英文标题“STOP THE HACKERS”文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, 
"hints": "" }, { "idx": 1181, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/42.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1181/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1791/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1791/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1181/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1181/loser.png", "save_id": 1791, "prompt_en": "Add the text 'Relax & Unwind' in the center of the sand below the hammock, using the same font and style as 'Hawaii' in the image.", "prompt_cn": "在吊床下方沙滩中央加入“Relax & Unwind”字样,字体和风格与图中的“Hawaii”保持一致。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1182, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/45.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1182/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1797/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1797/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1182/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1182/loser.png", "save_id": 1797, "prompt_en": "Change the text 'Mountain Climbing' to 'River Rafting'.", "prompt_cn": "将文字“Mountain Climbing”更改为“River Rafting”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1183, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/47.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1183/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1799/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1799/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1183/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1183/loser.png", "save_id": 1799, "prompt_en": "Change the word 'Bears' to 'Deer'.", "prompt_cn": "将单词“Bears”改为“Deer”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1184, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/47.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1184/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1799/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1799/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1184/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1184/loser.png", "save_id": 1799, "prompt_en": "Change the word 'Bears' to 'Deer'.", "prompt_cn": "将单词“Bears”改为“Deer”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1185, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/49.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1185/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1801/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1801/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1185/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1185/tie_2.png", "save_id": 1801, "prompt_en": "Change the text 'Orient Express' to 'Bullet Train'.", "prompt_cn": "将文字“Orient Express”修改为“Bullet Train”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1186, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/50.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1186/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1803/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1803/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1186/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1186/tie_2.png", "save_id": 1803, "prompt_en": "Change 'Hop on Hop off' to 'Night Tour'.", "prompt_cn": "将文字“Hop on Hop off”修改为“Night Tour”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1187, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/57.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1187/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1813/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1813/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1187/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1187/loser.png", "save_id": 1813, "prompt_en": "Change the word 'Meat' to 'Vegan'.", "prompt_cn": "将单词“Meat”改为“Vegan”。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1188, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/62.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1188/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1820/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1820/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1188/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1188/loser.png", "save_id": 1820, "prompt_en": "Change the score '1 - 0' to '2 - 2'.", "prompt_cn": "将比分从“1 - 0”改为“2 - 2”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1189, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/66.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1189/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1825/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1825/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1189/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1189/loser.png", "save_id": 1825, "prompt_en": "Remove the large orange “NEW YORK CITY MARATHON” English title text at the top.", "prompt_cn": "移除画面顶部橙色大字“NEW YORK CITY MARATHON”英文标题文字", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1190, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/71.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1190/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1831/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1831/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1190/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1190/loser.png", "save_id": 1831, "prompt_en": "Change the warning 'HIGH VOLTAGE' to 'WET FLOOR'.", "prompt_cn": "将警告文字从“HIGH VOLTAGE”更改为“WET FLOOR”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1191, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/72.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1191/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1832/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1832/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1191/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1191/loser.png", "save_id": 1832, "prompt_en": "Remove the English phrase “LOOK BOTH WAYS” inside the black rectangle at bottom left.", "prompt_cn": "移除左下角黑色矩形中的英文短语“LOOK BOTH WAYS”", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1192, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/101.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1192/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1833/9.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1833/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1192/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1192/tie_2.png", "save_id": 1833, "prompt_en": "Add red handwritten text reading “No Smoking” below the red no-smoking symbol.", "prompt_cn": "在红色禁烟符号下方添加红色手写体文字“No Smoking”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1193, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/80.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1193/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1841/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1841/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1193/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1193/loser.png", "save_id": 1841, "prompt_en": "Erase the smaller black English sentence “Join our global team. Apply today.” on the poster.", "prompt_cn": "擦除海报中间较小的黑色英文句子“Join our global team. 
Apply today.”", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1194, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/103.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1194/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1846/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1846/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1194/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1194/loser.png", "save_id": 1846, "prompt_en": "Add the title “Little Rabbit” at the top center of the image using colorful bubble letters arranged in an arch.", "prompt_cn": "在图片上方中央加入标题“Little Rabbit”,采用五颜六色的气泡字母,并呈拱形排布。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1195, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/84.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1195/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1847/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1847/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1195/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1195/loser.png", "save_id": 1847, "prompt_en": "Change the word 'Rabbit' to 'Fox'.", "prompt_cn": "将单词“Rabbit”更改为“Fox”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1196, "sampling_model": "Bagel", "source_image_ori": "Part3/Text_en/96.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1196/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1859/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Visual_Text_EN/1859/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1196/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1196/loser.png", "save_id": 1859, "prompt_en": "Add the dark brown decorative title \"INVENTORY\" centered on the empty top banner strip.", "prompt_cn": "在屏幕顶部空白横幅上添加深棕色装饰字体\"INVENTORY\"标题", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1197, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/3.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1197/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1108/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1108/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1197/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1197/loser.png", "save_id": 1108, "prompt_en": "Have the girl practice lat pulldowns.", "prompt_cn": "让这个女孩做高位下拉动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1198, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/3.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1198/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1108/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1108/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1198/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1198/loser.png", "save_id": 1108, "prompt_en": "Have the girl practice lat pulldowns.", "prompt_cn": "让这个女孩做高位下拉动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1199, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1199/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1110/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1110/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1199/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1199/loser.png", "save_id": 1110, "prompt_en": "Make the man and woman hug each other tightly.", "prompt_cn": "让这名男子和女子紧紧拥抱在一起。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1200, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1200/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1112/9.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1112/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1200/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1200/tie_2.png", "save_id": 1112, "prompt_en": "Make the boy and girl hold hands.", "prompt_cn": "让这个男孩和这个女孩牵着手。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1201, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1201/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1121/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1121/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1201/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1201/loser.png", "save_id": 1121, "prompt_en": "Have the man perform a bench press, lifting the barbell.", "prompt_cn": "让这个男人正在做卧推,举起杠铃。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1202, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1202/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1128/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1128/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1202/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1202/loser.png", "save_id": 1128, "prompt_en": "Make the chef chop the carrot.", "prompt_cn": "让这位厨师看起来正在切胡萝卜。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1203, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/32.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1203/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1134/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1134/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1203/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1203/loser.png", "save_id": 1134, "prompt_en": "Make the man pick up and eat the burger.", "prompt_cn": "让这个男人拿起汉堡并吃下去。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1204, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1204/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1135/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1135/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1204/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1204/tie_2.png", "save_id": 1135, "prompt_en": "Make the girl blow out the candles.", "prompt_cn": "让这个女孩看起来正在吹灭蜡烛。", "label": "tie", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1205, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1205/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1135/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1135/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1205/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1205/loser.png", "save_id": 1135, "prompt_en": "Make the girl blow out the candles.", "prompt_cn": "让这个女孩看起来正在吹灭蜡烛。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1206, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/57.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1206/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1153/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1153/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1206/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1206/loser.png", "save_id": 1153, "prompt_en": "Make the person open the door.", "prompt_cn": "让这个人打开门。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", 
"VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1207, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/69.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1207/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1156/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1156/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1207/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1207/loser.png", "save_id": 1156, "prompt_en": "Make the woman lift the suitcase by its handle.", "prompt_cn": "让这位女子抓住手提箱的手柄把它提起来。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1208, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/76.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1208/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1162/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1162/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1208/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1208/loser.png", "save_id": 1162, "prompt_en": "Make the chef hold the frying pan up by its handle.", "prompt_cn": "让厨师用手握住煎锅的把手,把煎锅举起来。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1209, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/80.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1209/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1166/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1166/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1209/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1209/loser.png", "save_id": 1166, "prompt_en": "Make the person bend down and pick up the hat.", "prompt_cn": "让这个人弯下腰并捡起帽子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1210, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/81.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1210/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1167/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1167/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1210/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1210/loser.png", "save_id": 1167, "prompt_en": "Make the cat push the ball of yarn with its paw.", "prompt_cn": "让猫用爪子推毛线球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1211, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/84.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1211/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1168/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1168/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1211/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1211/loser.png", "save_id": 1168, "prompt_en": "Make the penguin move forward to push the ice block.", "prompt_cn": "让这只企鹅去推冰块", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1212, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/86.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1212/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1170/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1170/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1212/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1212/loser.png", "save_id": 1170, "prompt_en": "Make the teenager ride the skateboard.", "prompt_cn": "让这个青少年正在踩着滑板滑行。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1213, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/91.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1213/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1171/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Object_Interaction/1171/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1213/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1213/loser.png", "save_id": 1171, "prompt_en": "Make the barber reach out and touch the customer’s hair.", "prompt_cn": "让理发师去摸顾客的头发。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1214, "sampling_model": "Bagel", "source_image_ori": "Part1/image/73.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1214/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/616/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/616/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1214/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1214/tie_2.png", "save_id": 616, "prompt_en": "Convert the scene into a vintage 1970s film photo with grain and warm tones.", "prompt_cn": "将场景转换为带颗粒感和暖色调的 1970 年代复古胶片照片风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1215, "sampling_model": "Bagel", "source_image_ori": "Part1/image/75.webp", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1215/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/618/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/618/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1215/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1215/tie_2.png", "save_id": 618, "prompt_en": "Convert the image into an 8-bit pixel art style.", "prompt_cn": "将图像转换为 8 位像素画风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1216, "sampling_model": "Bagel", "source_image_ori": "Part1/image/97.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1216/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/623/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/623/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1216/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1216/loser.png", "save_id": 623, "prompt_en": "Change the painting on the left side of the wall to a Cyberpunk style.", "prompt_cn": "将墙上左面那幅画换为赛博朋克风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1217, "sampling_model": "Bagel", "source_image_ori": "Part1/image/98.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1217/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/624/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/624/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1217/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1217/tie_2.png", "save_id": 624, "prompt_en": "Transform the image into a minimalist flat illustration.", "prompt_cn": "将图像转换为极简扁平插画风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1218, "sampling_model": "Bagel", "source_image_ori": "Part1/image/98.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1218/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/624/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/624/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1218/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1218/loser.png", "save_id": 624, "prompt_en": "Transform the image into a minimalist flat illustration.", "prompt_cn": "将图像转换为极简扁平插画风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1219, "sampling_model": "Bagel", "source_image_ori": "Part1/image/159.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1219/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/652/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/652/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1219/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1219/loser.png", "save_id": 652, "prompt_en": "Transform the image into a cozy rustic farmhouse interior style.", "prompt_cn": "将图像转换为温馨的乡村农舍室内风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1220, "sampling_model": "Bagel", "source_image_ori": "Part1/image/175.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1220/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/659/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Style_Transfer/659/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1220/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1220/loser.png", "save_id": 659, "prompt_en": "Transform the image into a Claymation style.", "prompt_cn": "将图像转化为黏土动画风格。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1221, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1221/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1179/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1179/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1221/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1221/tie_2.png", "save_id": 1179, "prompt_en": "Replace the smiling balloon with a frowning one.", "prompt_cn": "将微笑的气球替换为一个皱眉的气球。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1222, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1222/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1180/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1180/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1222/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1222/loser.png", "save_id": 1180, "prompt_en": "Make the cat look very fierce.”", "prompt_cn": "让这只猫看起来非常凶猛。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1223, "sampling_model": "Bagel", "source_image_ori": 
"Part2/emotion_change/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1223/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1180/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1180/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1223/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1223/loser.png", "save_id": 1180, "prompt_en": "Make the cat look very fierce.”", "prompt_cn": "让这只猫看起来非常凶猛。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1224, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/24.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1224/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1193/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1193/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1224/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1224/loser.png", "save_id": 1193, "prompt_en": "Change the expression to a sad face.", "prompt_cn": "将表情更改为悲伤的脸。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1225, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1225/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1198/5.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1198/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1225/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1225/loser.png", "save_id": 1198, "prompt_en": "Make the person sitting in the car look like they are having road rage.", "prompt_cn": "让车里坐的人看起来像是路怒症发作了。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1226, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/41.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1226/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1205/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1205/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1226/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1226/loser.png", "save_id": 1205, "prompt_en": "Make him show a fearful expression.", "prompt_cn": "让他展示出恐惧的表情。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1227, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/44.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1227/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1208/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1208/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1227/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1227/loser.png", "save_id": 1208, "prompt_en": "Make the cat look alert and focused.", "prompt_cn": "让猫看起来警觉且专注。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1228, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/51.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1228/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1213/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1213/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1228/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1228/tie_2.png", "save_id": 1213, "prompt_en": "Make the student yawning.", "prompt_cn": "让这位学生打哈欠。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1229, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/52.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1229/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1214/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1214/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1229/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1229/loser.png", "save_id": 1214, "prompt_en": "Make the dog look sleepy and dozing off.", "prompt_cn": "让这只狗看起来很困倦,正在打瞌睡。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1230, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/56.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1230/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1216/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1216/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1230/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1230/tie_2.png", "save_id": 1216, "prompt_en": "Make her sticking her tongue out playfully.", "prompt_cn": "让她调皮地伸出舌头。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1231, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/63.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1231/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1221/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1221/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1231/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1231/tie_2.png", "save_id": 1221, "prompt_en": "Make the dog tilt its head to the side in confusion.", "prompt_cn": "让这只狗把头歪向一侧,就好像它很困惑一样。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, 
"hints": "" }, { "idx": 1232, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/72.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1232/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1225/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1225/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1232/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1232/loser.png", "save_id": 1225, "prompt_en": "Make him look very impatient.", "prompt_cn": "让他看起来很不耐烦。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1233, "sampling_model": "Bagel", "source_image_ori": "Part2/emotion_change/72.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1233/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1225/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Emotion_Change/1225/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1233/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1233/loser.png", "save_id": 1225, "prompt_en": "Make him look very impatient.", "prompt_cn": "让他看起来很不耐烦。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1235, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1235/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/773/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/773/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1235/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1235/tie_2.png", "save_id": 773, "prompt_en": "Change the sofa material to red velvet.", "prompt_cn": "将沙发的材质更改为红色天鹅绒。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1236, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1236/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/773/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/773/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1236/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1236/loser.png", "save_id": 773, "prompt_en": "Change the sofa material to red velvet.", "prompt_cn": "将沙发的材质更改为红色天鹅绒。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1237, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1237/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/774/2.png", "loser_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/774/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1237/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1237/tie_2.png", "save_id": 774, "prompt_en": "Make the armchair made of brown leather.", "prompt_cn": "将扶手椅的材质改为棕色皮革。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1238, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1238/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/775/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/775/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1238/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1238/tie_2.png", "save_id": 775, "prompt_en": "Turn the dining chair into a wooden one.", "prompt_cn": "将这把餐椅变成一把木制餐椅。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1239, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1239/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/779/21.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/779/22.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1239/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1239/tie_2.png", "save_id": 779, "prompt_en": "Replace the curtains with blue ones made of cotton-linen.", "prompt_cn": "把窗帘换成蓝色的棉麻材质。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1240, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1240/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/780/8.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/780/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1240/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1240/tie_2.png", "save_id": 780, "prompt_en": "Turn it into a leather bean bag.", "prompt_cn": "将它变成一个皮革懒人沙发。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1241, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1241/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/780/8.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/780/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1241/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1241/tie_2.png", 
"save_id": 780, "prompt_en": "Turn it into a leather bean bag.", "prompt_cn": "将它变成一个皮革懒人沙发。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1242, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1242/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/781/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/781/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1242/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1242/tie_2.png", "save_id": 781, "prompt_en": "Change the carpet to woven velvet.", "prompt_cn": "把地毯变成编织丝绒材质。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1243, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1243/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/781/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/781/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1243/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1243/tie_2.png", "save_id": 781, "prompt_en": "Change the carpet to woven velvet.", "prompt_cn": "把地毯变成编织丝绒材质。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1244, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1244/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/784/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/784/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1244/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1244/tie_2.png", "save_id": 784, "prompt_en": "Change the dining table to walnut wood.", "prompt_cn": "将餐桌改为胡桃木的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1245, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1245/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/784/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/784/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1245/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1245/tie_2.png", "save_id": 784, "prompt_en": "Change the dining table to walnut wood.", "prompt_cn": "将餐桌改为胡桃木的。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" 
}, "hints": "" }, { "idx": 1246, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1246/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/785/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/785/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1246/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1246/tie_2.png", "save_id": 785, "prompt_en": "Change the bedside table to pine wood.", "prompt_cn": "将床头柜改成松木的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1247, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1247/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/786/17.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/786/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1247/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1247/tie_2.png", "save_id": 786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1248, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/22.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1248/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/791/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/791/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1248/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1248/tie_2.png", "save_id": 791, "prompt_en": "Turn the jacket into a denim one.", "prompt_cn": "将这件夹克变成一件牛仔夹克。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1249, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1249/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/791/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/791/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1249/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1249/tie_2.png", "save_id": 791, "prompt_en": "Turn the jacket into a denim one.", "prompt_cn": "将这件夹克变成一件牛仔夹克。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1250, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1250/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/792/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/792/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1250/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1250/tie_2.png", "save_id": 792, "prompt_en": "Change the jacket to leather.", "prompt_cn": "将外套改为皮质的。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1251, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Change_material/29.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1251/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/794/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Material/794/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1251/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1251/tie_2.png", "save_id": 794, "prompt_en": "Make the pants denim jeans.", "prompt_cn": "将这条裤子改成牛仔裤。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1252, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/81.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1252/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/829/0.png", "loser_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/829/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1252/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1252/tie_2.png", "save_id": 829, "prompt_en": "Keep the dog unchanged and change the background to the interior of a library filled with bookshelves.", "prompt_cn": "保持狗不变,将背景改为书架林立的图书馆内部。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1253, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/98.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1253/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/832/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/832/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1253/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1253/tie_2.png", "save_id": 832, "prompt_en": "Keep the bicycle, flowers, and watering can unchanged, and change the background to a sandy beach with the ocean and blue sky.", "prompt_cn": "保持自行车、花朵和浇水壶不变,将背景改为有海浪和蓝天的沙滩。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1254, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/133.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1254/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/837/11.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/837/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1254/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1254/tie_2.png", "save_id": 837, "prompt_en": "Keep the car unchanged, and change the background to a city highway at night.", "prompt_cn": "保持汽车不变,将背景改为夜晚的城市高速公路。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1255, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/156.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1255/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/841/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/841/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1255/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1255/tie_2.png", "save_id": 841, "prompt_en": "Keep the lighthouse unchanged, and change the background to a snowy Arctic landscape with icebergs.", "prompt_cn": "保持灯塔不变,将背景改为带有冰山的北极雪景。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1256, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/156.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1256/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/841/7.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/841/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1256/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1256/tie_2.png", "save_id": 841, "prompt_en": "Keep the lighthouse unchanged, and change the background to a snowy Arctic landscape with icebergs.", "prompt_cn": "保持灯塔不变,将背景改为带有冰山的北极雪景。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1257, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/158.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1257/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/843/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/843/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1257/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1257/tie_2.png", "save_id": 843, "prompt_en": "Keep the lighthouse unchanged and change the background to a busy modern harbor with container ships and cranes.", "prompt_cn": "保持灯塔不变,将背景改为有集装箱船和起重机的繁忙现代港口。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1258, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/204.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1258/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/846/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/846/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1258/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1258/tie_2.png", "save_id": 846, "prompt_en": "Keep the girl unchanged and change the background to a desert with golden sand dunes covering the ground.", "prompt_cn": "保持女子不变,将背景改为沙漠,地面覆盖着金黄色的沙丘。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1259, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/210.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1259/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/847/7.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/847/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1259/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1259/tie_2.png", "save_id": 847, "prompt_en": "Keep the man and the car unchanged, and change the background to a desert highway with sand dunes and a clear blue sky.", "prompt_cn": "保持人物和汽车不变,将背景改为沙丘起伏、蓝天晴朗的沙漠公路。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1260, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/210.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1260/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/847/23.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/847/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1260/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1260/tie_2.png", "save_id": 847, "prompt_en": "Keep the man and the car unchanged, and change the background to a desert highway with sand dunes and a clear blue sky.", "prompt_cn": "保持人物和汽车不变,将背景改为沙丘起伏、蓝天晴朗的沙漠公路。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1261, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/265.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1261/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/848/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/848/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1261/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1261/tie_2.png", "save_id": 848, "prompt_en": " Change the background to a snowy landscape while keeping the rabbits and basket unchanged.", "prompt_cn": "将背景更换为下雪的冬季景色,同时保持兔子和篮子不变。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1262, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/290.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1262/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/849/9.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/849/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1262/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1262/tie_2.png", "save_id": 849, "prompt_en": "Keep the shopping cart unchanged, and change the background to the interior of a supermarket.", "prompt_cn": "保持购物车不变,将背景更换为超市内部场景。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1263, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/290.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1263/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/849/9.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/849/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1263/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1263/tie_2.png", "save_id": 849, "prompt_en": "Keep the shopping cart unchanged, and change the background to the interior of a supermarket.", "prompt_cn": "保持购物车不变,将背景更换为超市内部场景。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1264, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/292.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1264/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/850/11.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_Background/850/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1264/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1264/tie_2.png", "save_id": 850, "prompt_en": "Keep the sleigh and gingerbread unchanged, and change the background to a snowy landscape.", "prompt_cn": "雪橇与姜饼保持不变,背景替换为冬季雪地。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1265, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/73.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1265/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/227/13.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/227/16.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1265/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1265/tie_2.png", "save_id": 227, "prompt_en": "Replace the laptop with a rose-gold tablet computer showing a colorful home screen.", "prompt_cn": "将笔记本电脑替换为一台玫瑰金色平板电脑,屏幕上显示彩色主界面。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1266, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/73.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1266/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/227/13.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/227/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1266/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1266/tie_2.png", "save_id": 227, "prompt_en": "Replace the laptop with a rose-gold tablet computer showing a colorful home screen.", "prompt_cn": "将笔记本电脑替换为一台玫瑰金色平板电脑,屏幕上显示彩色主界面。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1267, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/97.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1267/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/232/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/232/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1267/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1267/tie_2.png", "save_id": 232, "prompt_en": "Replace the left painting with a world map.", "prompt_cn": "将左侧画作换成一幅世界地图。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1268, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/99.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1268/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/233/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/233/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1268/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1268/tie_2.png", "save_id": 233, "prompt_en": "Replace the wooden bench with a bicycle.", "prompt_cn": "将木长椅替换为一辆自行车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1269, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/113.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1269/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/236/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/236/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1269/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1269/tie_2.png", "save_id": 236, "prompt_en": "Replace the pitcher on the windowsill with a desk lamp.", "prompt_cn": "将窗台上的陶壶替换成一盏台灯。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1270, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/113.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1270/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/236/21.png", "loser_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/236/22.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1270/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1270/tie_2.png", "save_id": 236, "prompt_en": "Replace the pitcher on the windowsill with a desk lamp.", "prompt_cn": "将窗台上的陶壶替换成一盏台灯。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1271, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/115.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1271/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/237/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/237/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1271/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1271/tie_2.png", "save_id": 237, "prompt_en": "Change the glass of milk on the table into a glass of juice.", "prompt_cn": "把桌上的那杯牛奶改成一杯果汁", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1272, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/116.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1272/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/239/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/239/4.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1272/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1272/tie_2.png", "save_id": 239, "prompt_en": "Swap the red heart in the bear’s hands for a flower.", "prompt_cn": "将小熊手里的红色爱心换成一朵花", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1273, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/119.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1273/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/241/14.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/241/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1273/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1273/tie_2.png", "save_id": 241, "prompt_en": "Replace the red scooter with a bicycle.", "prompt_cn": "将红色摩托车替换成一辆自行车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1274, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/126.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1274/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/244/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/244/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1274/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1274/tie_2.png", "save_id": 244, "prompt_en": "Replace the 
decorative pillow with a teddy bear.", "prompt_cn": "把圆柱形抱枕换成一只泰迪熊。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1275, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/126.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1275/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/244/13.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/244/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1275/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1275/tie_2.png", "save_id": 244, "prompt_en": "Replace the decorative pillow with a teddy bear.", "prompt_cn": "把圆柱形抱枕换成一只泰迪熊。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1276, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/131.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1276/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/245/20.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/245/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1276/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1276/tie_2.png", "save_id": 245, "prompt_en": "Replace the blue chair with a large 3D solar system model stand.", "prompt_cn": "将蓝色椅子替换为一个大型3D太阳系模型展示架。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1277, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/132.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1277/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/246/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/246/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1277/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1277/tie_2.png", "save_id": 246, "prompt_en": "Replace the sign on the left wall with a board showing the periodic table.", "prompt_cn": "将左侧墙上的招牌替换为一块印有元素周期表的牌子。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1278, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/134.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1278/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/247/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/247/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1278/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1278/tie_2.png", "save_id": 247, "prompt_en": "Turn the cilantro sprig in the upper right of the plate into a small edible purple orchid.", "prompt_cn": "把盘子右上角的香菜叶变成一朵小巧的可食用紫色兰花", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1279, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/135.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1279/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/248/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/248/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1279/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1279/tie_2.png", "save_id": 248, "prompt_en": "Replace the wall clock with a large round mirror.", "prompt_cn": "将挂钟替换为一面大圆镜子。\n", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1280, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/135.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1280/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/248/23.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Replace/248/20.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1280/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1280/tie_2.png", "save_id": 248, "prompt_en": "Replace the wall clock with a large round mirror.", "prompt_cn": "将挂钟替换为一面大圆镜子。\n", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1281, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": 
"Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1281/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/692/13.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/692/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1281/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1281/tie_2.png", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1282, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1282/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/694/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/694/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1282/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1282/tie_2.png", "save_id": 694, "prompt_en": "Extract the bench from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的长椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1283, "sampling_model": 
"Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/47.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1283/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/696/15.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/696/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1283/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1283/tie_2.png", "save_id": 696, "prompt_en": "Extract the hanging clock on the side of the building, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取建筑物侧面的挂钟,保持其位置、朝向和姿态不变,并将背景替换为纯白色。 ", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1284, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1284/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/698/12.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/698/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1284/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1284/tie_2.png", "save_id": 698, "prompt_en": "Extract the yellow lounge chair, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出这个黄色的躺椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 1285, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1285/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/702/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Extract/702/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1285/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1285/tie_2.png", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1286, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/124.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1286/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/554/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/554/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1286/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1286/loser.png", "save_id": 554, "prompt_en": "Change the chair upholstery color from yellow-green to dark navy blue.", "prompt_cn": "将椅子的布面颜色从黄绿色改为深海军蓝色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { 
"IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1287, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/124.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1287/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/554/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/554/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1287/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1287/tie_2.png", "save_id": 554, "prompt_en": "Change the chair upholstery color from yellow-green to dark navy blue.", "prompt_cn": "将椅子的布面颜色从黄绿色改为深海军蓝色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1288, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/137.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1288/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/556/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/556/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1288/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1288/tie_2.png", "save_id": 556, "prompt_en": "Make the stuffed bunny on the bed twice as large.", "prompt_cn": "将床上的毛绒兔子尺寸放大为原来的两倍。\n", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", 
"VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1289, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/142.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1289/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/558/21.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/558/22.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1289/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1289/loser.png", "save_id": 558, "prompt_en": "Change the color of the plant question-mark sculpture to bright yellow.", "prompt_cn": "将植物问号雕塑的颜色改为亮黄色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1290, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/142.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1290/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/558/22.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/558/21.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1290/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1290/loser.png", "save_id": 558, "prompt_en": "Change the color of the plant question-mark sculpture to bright yellow.", "prompt_cn": "将植物问号雕塑的颜色改为亮黄色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1291, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/241.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1291/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/570/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Change_color_size/570/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1291/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1291/tie_2.png", "save_id": 570, "prompt_en": "Change the black sedan on the right to a bright solid sky blue.", "prompt_cn": "将右侧黑色轿车车身改成亮丽的天蓝色", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1292, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1292/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/132/8.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/132/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1292/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1292/tie_2.png", "save_id": 132, "prompt_en": "Remove the blue overhead traffic sign from the image.", "prompt_cn": "移除蓝色的交通指路牌。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1293, 
"sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1293/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/133/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/133/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1293/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1293/tie_2.png", "save_id": 133, "prompt_en": "Remove the rainbow from the sky.", "prompt_cn": "移除天空中的彩虹。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1294, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1294/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/134/3.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/134/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1294/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1294/tie_2.png", "save_id": 134, "prompt_en": "Remove Golden Panda Sculpture.", "prompt_cn": "移除金色熊猫雕塑", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1295, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1295/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/136/3.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/136/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1295/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1295/tie_2.png", "save_id": 136, "prompt_en": "Remove the magnifying glass from the image.", "prompt_cn": "移除图中的放大镜 ", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1296, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1296/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/137/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/137/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1296/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1296/tie_2.png", "save_id": 137, "prompt_en": "Remove the green plant on the cabinet.", "prompt_cn": "移除柜子上的绿色植物", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1297, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1297/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/138/2.png", "loser_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/138/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1297/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1297/tie_2.png", "save_id": 138, "prompt_en": "Remove the robot figure on the right.", "prompt_cn": "移除右边的机器人公仔", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1298, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1298/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/140/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/140/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1298/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1298/tie_2.png", "save_id": 140, "prompt_en": "Remove the heart from the image.", "prompt_cn": "移除图像中的爱心。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1299, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/66.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1299/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/147/12.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/147/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1299/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1299/tie_2.png", "save_id": 147, "prompt_en": "Remove the letter in the center of the image.", "prompt_cn": "移除图片中间的信。 ", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1300, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/66.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1300/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/147/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/147/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1300/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1300/tie_2.png", "save_id": 147, "prompt_en": "Remove the letter in the center of the image.", "prompt_cn": "移除图片中间的信。 ", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1301, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/87.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1301/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/152/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/152/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1301/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1301/tie_2.png", "save_id": 152, "prompt_en": "Remove the single noticeable white egg from the middle of the carton.", "prompt_cn": "移除蛋盒中唯一那颗显眼的白色鸡蛋", 
"label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1302, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1302/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/183/23.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/183/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1302/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1302/tie_2.png", "save_id": 183, "prompt_en": "Take out all the glassware from the table.", "prompt_cn": "将桌面上的所有玻璃器皿移除。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1303, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1303/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/185/22.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/185/18.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1303/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1303/tie_2.png", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1304, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1304/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/186/12.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/186/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1304/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1304/tie_2.png", "save_id": 186, "prompt_en": "Remove all leather furniture.", "prompt_cn": "移除所有皮革家具。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1305, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1305/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/188/21.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/188/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1305/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1305/tie_2.png", "save_id": 188, "prompt_en": "Remove all black items.", "prompt_cn": "从图像中移除所有黑色物品。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1306, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/12.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1306/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/190/12.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/190/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1306/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1306/tie_2.png", "save_id": 190, "prompt_en": "Remove all spherical objects.", "prompt_cn": "移除所有球形物体。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1307, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_attribute/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1307/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/192/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/192/23.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1307/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1307/tie_2.png", "save_id": 192, "prompt_en": "Remove all electronic devices.", "prompt_cn": "移除所有电子设备。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1308, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1308/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/196/0.png", "loser_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/196/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1308/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1308/tie_2.png", "save_id": 196, "prompt_en": "Remove the armchair on the left side.", "prompt_cn": "删除图像左侧的扶手椅。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1309, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1309/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/197/7.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/197/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1309/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1309/tie_2.png", "save_id": 197, "prompt_en": "Remove the ceramic vase located in the center of the image.", "prompt_cn": "移除中间的陶瓷花瓶。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1310, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/3.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1310/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/198/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/198/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1310/tie_1.png", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1310/tie_2.png", "save_id": 198, "prompt_en": "Remove the plant on the far right.", "prompt_cn": "移除最右侧的那盆植物。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1311, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1311/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/199/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/199/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1311/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1311/tie_2.png", "save_id": 199, "prompt_en": "Remove the top book from the stack on the right side of the image.", "prompt_cn": "移除右边最上面的那本书。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1312, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Remove_spatial/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1312/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/201/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/Remove/201/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1312/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1312/tie_2.png", "save_id": 201, "prompt_en": "Remove the vehicle that is farther away from the camera.", 
"prompt_cn": "移除离镜头更远的车辆。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1313, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1313/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/19/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/19/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1313/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1313/tie_2.png", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1314, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1314/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/20/13.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/20/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1314/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1314/tie_2.png", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1315, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/21.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1315/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/21/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/21/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1315/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1315/tie_2.png", "save_id": 21, "prompt_en": "Place a small coffee table on the rug in front of the sofa.", "prompt_cn": "在沙发前面的地毯上放一个小茶几。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1316, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1316/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/24/10.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/24/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1316/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1316/tie_2.png", "save_id": 24, "prompt_en": "Hang a painting on the wall between the two central windows", "prompt_cn": "在两扇窗之间的墙上挂一幅画", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1317, "sampling_model": "Qwen-Image-Edit-2509", 
"source_image_ori": "Part1/image/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1317/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/24/21.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/24/23.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1317/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1317/tie_2.png", "save_id": 24, "prompt_en": "Hang a painting on the wall between the two central windows", "prompt_cn": "在两扇窗之间的墙上挂一幅画", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1318, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/24.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1318/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/25/14.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/25/18.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1318/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1318/tie_2.png", "save_id": 25, "prompt_en": "Place a cutting board on the countertop next to the sink on the right", "prompt_cn": "在右侧水槽旁的操作台上放一个砧板", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1319, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1319/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/26/16.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/26/17.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1319/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1319/tie_2.png", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1320, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1320/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/26/17.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/26/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1320/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1320/tie_2.png", "save_id": 26, "prompt_en": "Place a basketball on the empty ground to the left of the court.", "prompt_cn": "在篮球场左侧空地上放一个篮球。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1321, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1321/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/29/6.png", "loser_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/29/20.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1321/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1321/tie_2.png", "save_id": 29, "prompt_en": "Add a picnic basket on the sand in front left of the table", "prompt_cn": "在桌子左前方的沙滩上添加一个小野餐篮", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1322, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1322/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/34/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/34/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1322/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1322/tie_2.png", "save_id": 34, "prompt_en": "Replace the bedside lamp on the right nightstand with an alarm clock.", "prompt_cn": "将右侧床头柜的台灯换为一个闹钟。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1323, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1323/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/37/22.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/37/21.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1323/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1323/tie_2.png", "save_id": 37, "prompt_en": "Add a Starbucks iced latte with the logo visible on the table.", "prompt_cn": "在桌子上加入一杯带标志的星巴克冰拿铁 。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1324, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Casual_Reason/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1324/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part3/Casual_Aware/1227/9.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part3/Casual_Aware/1227/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1324/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1324/tie_2.png", "save_id": 1227, "prompt_en": "Remove the first book beneath the telescope.", "prompt_cn": "移除望远镜下方最上面的那本书。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1325, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1325/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1112/8.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1112/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1325/tie_1.png", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1325/tie_2.png", "save_id": 1112, "prompt_en": "Make the boy and girl hold hands.", "prompt_cn": "让这个男孩和这个女孩牵着手。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1326, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/9.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1326/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1113/6.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1113/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1326/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1326/tie_2.png", "save_id": 1113, "prompt_en": "Have the boy hold the bottle of Coke in his right hand and drink from it.", "prompt_cn": "让这个男孩用右手拿着这瓶可乐并正在喝。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1327, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/9.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1327/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1113/16.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1113/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1327/tie_1.png", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1327/tie_2.png", "save_id": 1113, "prompt_en": "Have the boy hold the bottle of Coke in his right hand and drink from it.", "prompt_cn": "让这个男孩用右手拿着这瓶可乐并正在喝。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1328, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1328/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/968/3.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/968/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1328/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1328/tie_2.png", "save_id": 968, "prompt_en": "Move the single cherry tomato onto the spoon.”", "prompt_cn": "将那颗单独的樱桃番茄移到勺子上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1329, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1329/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/968/18.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/968/22.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1329/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1329/tie_2.png", "save_id": 968, "prompt_en": "Move the single cherry tomato onto the spoon.”", "prompt_cn": "将那颗单独的樱桃番茄移到勺子上。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1330, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1330/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/970/13.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/970/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1330/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1330/tie_2.png", "save_id": 970, "prompt_en": "Move the chair to the right side of the sofa.", "prompt_cn": "将椅子移动到沙发的右侧。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1331, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1331/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/970/17.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/970/23.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1331/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1331/tie_2.png", "save_id": 970, "prompt_en": "Move the 
chair to the right side of the sofa.", "prompt_cn": "将椅子移动到沙发的右侧。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1332, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1332/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/981/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/981/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1332/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1332/tie_2.png", "save_id": 981, "prompt_en": "Move the table lamp to the right bedside table.", "prompt_cn": "将台灯移到右侧的床头柜上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1333, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1333/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/981/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/981/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1333/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1333/tie_2.png", "save_id": 981, "prompt_en": "Move the table lamp to the right bedside table.", "prompt_cn": "将台灯移到右侧的床头柜上。", "label": "tie", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1334, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1334/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/985/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/985/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1334/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1334/tie_2.png", "save_id": 985, "prompt_en": "Move the cat onto the sofa.", "prompt_cn": "将猫移动到沙发上。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1335, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Move/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1335/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/985/22.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Move/985/21.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1335/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1335/tie_2.png", "save_id": 985, "prompt_en": "Move the cat onto the sofa.", "prompt_cn": "将猫移动到沙发上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1336, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/200.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1336/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/299/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/299/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1336/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1336/tie_2.png", "save_id": 299, "prompt_en": "Remove the chair on the left, hang a black suit jacket on the chair on the right, replace the flowers in the vase with roses, and remove the fruits and fruit plate from the image.", "prompt_cn": "移除左侧的椅子,在右侧的椅子上挂一件黑色西装外套,将花瓶里的花替换为玫瑰花,移除图像中的水果和水果盘。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1337, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/205.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1337/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/301/5.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/301/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1337/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1337/tie_2.png", "save_id": 301, "prompt_en": "Add the black text 'Machine Learning' in the center of the laptop screen, remove the circular ornament on the left, place an open heavy dictionary in front of the laptop, and remove the stone in the lower-left corner of the 
desk.", "prompt_cn": "在笔记本电脑屏幕中央添加一句黑色文字‘Machine Learning’,将左侧圆环形摆件移除,在笔记本电脑前面加入一本翻开的厚重词典,移除桌面左前方的石头。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1338, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/235.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1338/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/304/9.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/304/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1338/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1338/tie_2.png", "save_id": 304, "prompt_en": "Turn on the TV on the wall, showing a Tom and Jerry cartoon. Write 'Successful' on the wall above the TV. 
Remove the clock in the image, and change all the stools to blue.", "prompt_cn": "打开墙上的电视,显示猫和老鼠的动画片,在电视上方的墙上写上‘Successful’,移除图中的时钟,将所有凳子改为蓝色。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1339, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/303.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1339/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/306/8.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/306/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1339/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1339/tie_2.png", "save_id": 306, "prompt_en": "Have the girl pick up the water bottle in the center and drink from it, change the pants to the same color as the top, and add a white towel around her neck.", "prompt_cn": "让这个女生拿起中间的水瓶喝水,将裤子改为与上衣相同的颜色,并在脖子上挂一条白色毛巾。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1340, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/304.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1340/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/307/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/307/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1340/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1340/tie_2.png", "save_id": 
307, "prompt_en": "Add a red scarf to the boy, place a sticky note on the table with the cola, have him pick up the cola, and change his short-sleeved shirt to white.", "prompt_cn": "给这个男孩戴上一条红领巾,在放可乐的桌子上添加一个便利贴,让他拿起桌子上的可乐,并将短袖改为白色。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1341, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/307.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1341/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/310/2.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/310/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1341/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1341/tie_2.png", "save_id": 310, "prompt_en": "Remove the top book on the coffee table, replace the ceramic kettle with a globe, change the sofa to light yellow, and place a sports jacket on the sofa.", "prompt_cn": "删除茶几上最上面的一本书,将茶几上的陶瓷水壶替换为地球仪,将沙发改为淡黄色,并在沙发上放一件运动外套。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1342, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/309.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1342/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/311/13.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/311/17.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1342/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1342/tie_2.png", "save_id": 311, "prompt_en": "Add a park sign next to the path with the text 'Sunset Research Park', change the bench to white and add a person reading on it, and place a tent on the lawn.", "prompt_cn": "在小路旁添加一块公园指示牌,牌子上写‘Sunset Research Park’,将长椅改为白色并在上面添加一位读书的人,在草坪上加入一个帐篷。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1343, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/309.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1343/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/311/21.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/311/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1343/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1343/tie_2.png", "save_id": 311, "prompt_en": "Add a park sign next to the path with the text 'Sunset Research Park', change the bench to white and add a person reading on it, and place a tent on the lawn.", "prompt_cn": "在小路旁添加一块公园指示牌,牌子上写‘Sunset Research Park’,将长椅改为白色并在上面添加一位读书的人,在草坪上加入一个帐篷。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1344, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/313.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1344/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/314/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Complex/314/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1344/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1344/tie_2.png", "save_id": 314, "prompt_en": "Add a small dog walking beside the surfer and change the surfboard color to bright yellow.", "prompt_cn": "在冲浪者旁边添加一只小狗,并将冲浪板颜色改为亮黄色", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1345, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1345/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1179/10.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1179/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1345/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1345/tie_2.png", "save_id": 1179, "prompt_en": "Replace the smiling balloon with a frowning one.", "prompt_cn": "将微笑的气球替换为一个皱眉的气球。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1346, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1346/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1179/22.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1179/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1346/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1346/tie_2.png", "save_id": 1179, "prompt_en": "Replace the smiling balloon with a frowning one.", "prompt_cn": "将微笑的气球替换为一个皱眉的气球。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1347, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1347/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1182/6.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1182/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1347/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1347/tie_2.png", "save_id": 1182, "prompt_en": "Make him smiling broadly.", "prompt_cn": "让他开怀大笑,露出灿烂的笑容。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1348, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1348/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1183/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1183/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1348/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1348/tie_2.png", "save_id": 1183, "prompt_en": "Make the baby laughing happily.", "prompt_cn": "让婴儿开心地笑起来。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1349, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/emotion_change/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1349/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1185/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Emotion_Change/1185/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1349/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1349/tie_2.png", "save_id": 1185, "prompt_en": "Make her laughing with her mouth open.", "prompt_cn": "让她张开嘴笑出来。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1350, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1350/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Visual_Text_EN/1737/9.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Visual_Text_EN/1737/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1350/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1350/tie_2.png", "save_id": 1737, "prompt_en": "Remove the large metallic silver English title text “CYBER HUNT” at the top of the poster.", "prompt_cn": "移除画面顶部巨大的银色英文标题“CYBER HUNT”文字效果", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1351, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part3/Text_en/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1351/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Visual_Text_EN/1739/9.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part6/Visual_Text_EN/1739/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1351/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1351/tie_2.png", "save_id": 1739, "prompt_en": "Erase the blood-red English title text “THE GHOST” from the upper center of the image.", "prompt_cn": "擦除画面上方中央血红色的英文标题“THE GHOST”文字", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1352, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Virtual_Try_On/1/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1352/source.png", "winner_src": 
"/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part5/Virtual_Try_On/2985/16.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part5/Virtual_Try_On/2985/17.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1352/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1352/tie_2.png", "save_id": 2985, "prompt_en": "Change the man’s outfit to the T-shirt from reference image 1 and the jeans from reference image 2", "prompt_cn": "让男子换上参考图像1的T恤和参考图像2的牛仔裤。", "label": "tie", "dimension": "IF", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1352/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1352/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1353, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Virtual_Try_On/2/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1353/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part5/Virtual_Try_On/2986/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part5/Virtual_Try_On/2986/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1353/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1353/tie_2.png", "save_id": 2986, "prompt_en": "Change her outfit to the dress from reference image 1", "prompt_cn": "将她的衣服换成参考图像1的这条裙子。", "label": "tie", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1353/ref_1.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": 
"IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1354, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part5/Virtual_Try_On/4/source.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1354/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part5/Virtual_Try_On/2988/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part5/Virtual_Try_On/2988/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1354/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1354/tie_2.png", "save_id": 2988, "prompt_en": "Replace the model’s outfit with the shirt from reference image 1 and the skirt from reference image 2.", "prompt_cn": "将模特的服装换成参考图像1的衬衫和参考图像2的裙子。", "label": "tie", "dimension": "VC", "reference_image_path": [ "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1354/ref_1.png", "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1354/ref_2.png" ], "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1355, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1355/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Action/900/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Action/900/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1355/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1355/tie_2.png", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "tie", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1356, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Time_Reason/tr_003.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1356/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part3/Time_Aware/1311/20.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part3/Time_Aware/1311/17.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1356/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1356/tie_2.png", "save_id": 1311, "prompt_en": "Show the potato after being left forgotten for one month in a room at room temperature.", "prompt_cn": "展示这个土豆在常温的房子中被遗忘一个月后的样子", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1357, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Obj_interaction/12.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1357/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1116/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Object_Interaction/1116/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1357/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1357/tie_2.png", "save_id": 1116, "prompt_en": "Make them shake hands.", "prompt_cn": "让他们握手。", "label": "tie", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1358, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1358/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Action/898/1.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Action/898/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1358/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1358/tie_2.png", "save_id": 898, "prompt_en": "Make the cat open its eyes.", "prompt_cn": "让这只猫把眼睛睁开。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1359, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1359/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Action/899/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part2/Action/899/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1359/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1359/tie_2.png", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1360, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1360/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/1/3.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/1/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1360/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1360/tie_2.png", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1361, "sampling_model": "Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1361/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/3/4.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/3/19.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1361/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1361/tie_2.png", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1362, "sampling_model": 
"Qwen-Image-Edit-2509", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1362/source.png", "winner_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/4/0.png", "loser_src": "/root/data/bxh/OmniBench_EVAL_RM/Edit-R1-Qwen-Image-Edit-2509_t_20_n_0.4/en/Part1/ADD/4/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1362/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1362/tie_2.png", "save_id": 4, "prompt_en": "Place a silver metal pen holder containing several black pens to the left of the computer.", "prompt_cn": "放置一只银色金属笔筒装几支黑笔在电脑的左边。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1568, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1568/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/1/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/1/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1568/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1568/tie_2.png", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1569, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1569/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/1/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/1/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1569/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1569/loser.png", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1570, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1570/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/3/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/3/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1570/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1570/tie_2.png", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1571, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1571/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/19/4.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/19/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1571/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1571/tie_2.png", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1572, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1572/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/20/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/20/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1572/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1572/loser.png", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1573, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/20.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1573/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/20/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/20/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1573/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1573/loser.png", "save_id": 20, "prompt_en": "Add a light wood fruit basket containing fresh apples on the table.", "prompt_cn": "在桌子上放置一个浅色木质水果篮装着新鲜苹果", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1574, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/34.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1574/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/35/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/35/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1574/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1574/tie_2.png", "save_id": 35, "prompt_en": "Add a kitten on the stool next to the dressing table chair.", "prompt_cn": "在梳妆台椅子旁的凳子上加入一只小猫。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1575, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/41.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1575/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/43/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/43/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1575/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1575/tie_2.png", "save_id": 43, "prompt_en": "Place an orange reflective life jacket on the shore near the boat’s bow.", "prompt_cn": 
"在靠近船头的岸边放一件橙色反光救生衣", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1576, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/44.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1576/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/46/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/46/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1576/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1576/tie_2.png", "save_id": 46, "prompt_en": "Add a small black digital timer with lit screen on the empty upper-right wooden area.", "prompt_cn": "在右上角空白木板上添加一个小巧黑色数字计时器,屏幕点亮", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1577, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/45.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1577/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/47/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/47/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1577/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1577/loser.png", "save_id": 47, "prompt_en": "Lean a red-and-white striped fire extinguisher against the wall near the ladder.", "prompt_cn": "在建筑右侧墙边靠着梯子放一个红白条纹灭火器", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1578, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/51.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1578/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/53/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/53/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1578/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1578/tie_2.png", "save_id": 53, "prompt_en": "Add a red cartoon toy car.", "prompt_cn": "加入一个红色的卡通玩具车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1579, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/68.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1579/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/68/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/68/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1579/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1579/tie_2.png", "save_id": 68, "prompt_en": "Add a pair of white Nike Air Force 1 sneakers on the desk.", "prompt_cn": "在桌子上加入一双 Nike Air Force 1 白色球鞋。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1580, "sampling_model": "Bagel-Think", "source_image_ori": 
"Part1/image/83.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1580/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/71/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/71/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1580/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1580/loser.png", "save_id": 71, "prompt_en": "Place a small quadcopter drone in front of the red door.", "prompt_cn": "在红色的门前放置一架小型四轴无人机。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1581, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/148.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1581/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/80/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/80/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1581/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1581/loser.png", "save_id": 80, "prompt_en": "Add a Minion to the road.", "prompt_cn": "在道路上加入一个小黄人。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1582, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/256.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1582/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/85/2.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/85/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1582/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1582/tie_2.png", "save_id": 85, "prompt_en": "Place a road sign on the grassy area to the left of the road.", "prompt_cn": "在道路旁左侧草地放置一个路牌", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1583, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/267.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1583/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/91/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/ADD/91/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1583/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1583/loser.png", "save_id": 91, "prompt_en": "Add a blue cushion on the floor in front of the cabinet.", "prompt_cn": "在柜子前面的地板上添加一个蓝色坐垫。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1584, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1584/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part2/Action/900/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part2/Action/900/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1584/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1584/tie_2.png", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1585, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/6.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1585/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part2/Action/902/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part2/Action/902/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1585/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1585/loser.png", "save_id": 902, "prompt_en": "Make this Pikachu jump up to pick the fruit.", "prompt_cn": "让这只皮卡丘跳起来去摘水果。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1586, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/37.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1586/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part2/Action/932/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part2/Action/932/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1586/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1586/loser.png", "save_id": 932, "prompt_en": "Make the dancer lift her left 
leg up to waist height.", "prompt_cn": "让舞者将左腿抬起到腰部高度。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1589, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/299.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1589/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Background/851/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Background/851/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1589/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1589/loser.png", "save_id": 851, "prompt_en": "Keep the astronaut and the red terrain unchanged, and change the background to a blue sky.", "prompt_cn": "保持宇航员和红色地貌不动,将背景改为蓝色的天空。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1590, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/382.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1590/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Background/854/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Background/854/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1590/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1590/loser.png", "save_id": 854, "prompt_en": "Change the fisherman’s background to a sunrise scene at a clear alpine lake 
surrounded by mountains.", "prompt_cn": "将钓鱼者的背景改为群山环绕、湖水清澈的高山湖泊日出场景。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1591, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/395.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1591/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Background/862/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Background/862/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1591/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1591/loser.png", "save_id": 862, "prompt_en": "Move the letters from the wooden desk to grass beside a tranquil lakeside.", "prompt_cn": "将信件从木桌移到宁静湖畔的草地上", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1592, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/82.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1592/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/620/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/620/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1592/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1592/tie_2.png", "save_id": 620, "prompt_en": "Transform the scene into a pencil sketch style.", "prompt_cn": "将场景转换为铅笔素描风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1593, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/98.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1593/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/624/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/624/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1593/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1593/tie_2.png", "save_id": 624, "prompt_en": "Transform the image into a minimalist flat illustration.", "prompt_cn": "将图像转换为极简扁平插画风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1594, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/99.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1594/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/625/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/625/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1594/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1594/tie_2.png", "save_id": 625, "prompt_en": "Transform this image into a Pop Art style.", "prompt_cn": "将这个图像改为波普艺术风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1595, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/124.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1595/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/633/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/633/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1595/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1595/tie_2.png", "save_id": 633, "prompt_en": "Convert the image to a watercolor painting style.", "prompt_cn": "将整张图像转换为水彩画风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1596, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/129.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1596/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/637/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/637/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1596/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1596/tie_2.png", "save_id": 637, "prompt_en": "Transform the image into a realistic style.", "prompt_cn": "将图像转化为写实风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1597, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/132.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1597/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/638/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/638/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1597/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1597/tie_2.png", "save_id": 638, "prompt_en": "Transform the image into a Futurism style.", "prompt_cn": "将图像转化为未来主义风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1598, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/135.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1598/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/640/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/640/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1598/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1598/loser.png", "save_id": 640, "prompt_en": "Transform the image into an illustration style.", "prompt_cn": "将图像转化为插画风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1599, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/145.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1599/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/645/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/645/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1599/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1599/tie_2.png", "save_id": 645, "prompt_en": 
"Convert the image into an icon-style design.", "prompt_cn": "将图像转化为Icon 图标风。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1600, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/147.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1600/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/647/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/647/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1600/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1600/loser.png", "save_id": 647, "prompt_en": "Transform the image into Mondrian style.", "prompt_cn": "将图像转化为蒙德里安风。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1601, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/158.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1601/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/651/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/651/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1601/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1601/loser.png", "save_id": 651, "prompt_en": "Render the image in a line-art comic style.", "prompt_cn": "将图像渲染为线稿漫画风格。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 1602, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/165.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1602/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/655/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/655/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1602/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1602/loser.png", "save_id": 655, "prompt_en": "Transform the image into a Minimalist style.", "prompt_cn": "将图像转化为极简风格。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1603, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/175.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1603/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/659/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/659/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1603/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1603/loser.png", "save_id": 659, "prompt_en": "Transform the image into a Claymation style.", "prompt_cn": "将图像转化为黏土动画风格。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1604, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/273.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1604/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/668/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/668/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1604/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1604/tie_2.png", "save_id": 668, "prompt_en": "ransform the image style into an American comic style.", "prompt_cn": "将图像的风格转化为美式漫画。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1605, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/335.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1605/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/672/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/672/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1605/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1605/tie_2.png", "save_id": 672, "prompt_en": " Convert the scene into a minimalist flat illustration.", "prompt_cn": "将场景转为极简平面插画风格。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1606, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/405.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1606/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/676/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Style_Transfer/676/6.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1606/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1606/tie_2.png", "save_id": 676, "prompt_en": "Transform the image into a Japanese anime art style.", "prompt_cn": "将这张图片改为日本动漫风格", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1607, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1607/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Material/795/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Material/795/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1607/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1607/loser.png", "save_id": 795, "prompt_en": "Change the handbag to canvas.", "prompt_cn": "把手提包改成帆布材质。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1608, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/41.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1608/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Material/804/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_Material/804/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1608/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1608/tie_2.png", "save_id": 804, "prompt_en": "Turn the tumbler into plastic.", "prompt_cn": 
"将该随行杯改为由塑料制成。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1609, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/140.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1609/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_color_size/557/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_color_size/557/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1609/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1609/loser.png", "save_id": 557, "prompt_en": "Reduce the refrigerator to half of its current height while keeping its proportions.", "prompt_cn": "把冰箱缩小到现在的一半高度,但保持比例不变。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1610, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1610/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_color_size/563/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Change_color_size/563/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1610/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1610/loser.png", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1611, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/312.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1611/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/313/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/313/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1611/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1611/loser.png", "save_id": 313, "prompt_en": "Make the boy look very sad, remove the water bottle from his backpack, and change the backpack to sky blue.", "prompt_cn": "让男孩的表情变得很难过,移除书包中的水杯,并将书包改为天蓝色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1612, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/313.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1612/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/314/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/314/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1612/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1612/loser.png", "save_id": 314, "prompt_en": "Add a small dog walking beside the surfer and change the surfboard color to bright yellow.", "prompt_cn": "在冲浪者旁边添加一只小狗,并将冲浪板颜色改为亮黄色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1613, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/363.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1613/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/345/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/345/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1613/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1613/loser.png", "save_id": 345, "prompt_en": "Swap the positions of the two pairs of shoes, remove the green plant from the image, add a shoebox to the right of the shoes, and change the cushion on the sofa to blue.", "prompt_cn": "交换两双鞋的位置,移除图中的绿植,在鞋的右边添加鞋盒,并将沙发上的抱枕改为蓝色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1614, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/369.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1614/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/351/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/351/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1614/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1614/loser.png", "save_id": 351, "prompt_en": "Remove the scarf from the girl and have her sit on the sofa, replace the green plant in the image with a guitar, and add a blue robot vacuum on the floor.", "prompt_cn": "移除这个女孩的围脖,并让她坐在沙发上,将图像中的绿植替换为吉他,并在地上添加一个蓝色的扫地机器人。", 
"label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1615, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/373.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1615/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/355/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/355/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1615/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1615/tie_2.png", "save_id": 355, "prompt_en": "Remove the TV on the wall, add a chandelier near the ceiling light, change the wooden desk to glass, and place a laptop on the desk.", "prompt_cn": "移除墙上的电视,在天花板灯光旁添加一个吊灯,将木质办公桌改为玻璃材质,并在办公桌上添加一台笔记本电脑。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1616, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/376.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1616/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/358/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/358/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1616/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1616/tie_2.png", "save_id": 358, "prompt_en": "Replace the globe in the image with a bucket of popcorn, remove the green plant, and change the black-and-white painting on the wall to color.", "prompt_cn": 
"将图中的地球仪换成一桶爆米花,移除图中的绿植,并将墙上的黑白画改为彩色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1617, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/376.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1617/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/358/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/358/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1617/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1617/loser.png", "save_id": 358, "prompt_en": "Replace the globe in the image with a bucket of popcorn, remove the green plant, and change the black-and-white painting on the wall to color.", "prompt_cn": "将图中的地球仪换成一桶爆米花,移除图中的绿植,并将墙上的黑白画改为彩色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1618, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/33.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1618/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/418/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Complex/418/15.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1618/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1618/loser.png", "save_id": 418, "prompt_en": "Have the girl stand up and look up at the sky.", "prompt_cn": "让女孩站起来并抬头仰望天空。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1619, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/39.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1619/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/695/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/695/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1619/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1619/loser.png", "save_id": 695, "prompt_en": "Extract the boy riding the bicycle and his bicycle, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中骑自行车的男孩及其自行车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1620, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/39.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1620/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/695/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/695/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1620/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1620/loser.png", "save_id": 695, "prompt_en": "Extract the boy riding the bicycle and his bicycle, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": 
"提取图像中骑自行车的男孩及其自行车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1622, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/141.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1622/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/154/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/154/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1622/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1622/loser.png", "save_id": 154, "prompt_en": "Remove the curtains from the image.", "prompt_cn": "移除图中的窗帘。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1623, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/261.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1623/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/158/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/158/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1623/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1623/loser.png", "save_id": 158, "prompt_en": "Remove all the popcorn pieces that are scattered on the table.", "prompt_cn": "移除桌面上所有散落的爆米花粒。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" 
}, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1624, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/277.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1624/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/166/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/166/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1624/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1624/loser.png", "save_id": 166, "prompt_en": "Remove any candles from the composition.", "prompt_cn": "移除图中的蜡烛。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1625, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/388.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1625/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/172/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/172/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1625/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1625/loser.png", "save_id": 172, "prompt_en": "Remove the blanket from the sofa.", "prompt_cn": "移除沙发上的毯子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1626, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Remove_attribute/6.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1626/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/184/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/184/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1626/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1626/loser.png", "save_id": 184, "prompt_en": "Remove all the blue cars.", "prompt_cn": "移除所有蓝色的汽车。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1627, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Remove_attribute/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1627/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/188/13.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Remove/188/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1627/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1627/loser.png", "save_id": 188, "prompt_en": "Remove all black items.", "prompt_cn": "从图像中移除所有黑色物品。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1628, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/119.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1628/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Replace/241/2.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Replace/241/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1628/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1628/tie_2.png", "save_id": 241, "prompt_en": "Replace the red scooter with a bicycle.", "prompt_cn": "将红色摩托车替换成一辆自行车。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1629, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/135.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1629/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Replace/248/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Replace/248/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1629/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1629/loser.png", "save_id": 248, "prompt_en": "Replace the wall clock with a large round mirror.", "prompt_cn": "将挂钟替换为一面大圆镜子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1630, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1630/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/702/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/702/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1630/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1630/loser.png", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1631, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/88.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1631/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/703/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/703/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1631/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1631/loser.png", "save_id": 703, "prompt_en": "Extract the black 8-ball, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出那颗黑色的8号台球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1632, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1632/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/705/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/705/4.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1632/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1632/loser.png", "save_id": 705, "prompt_en": "Extract the soccer ball from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的足球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1633, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/100.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1633/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/707/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/707/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1633/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1633/loser.png", "save_id": 707, "prompt_en": "Extract the larger pigeon, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出较大的鸽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1634, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/153.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1634/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/717/2.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/717/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1634/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1634/loser.png", "save_id": 717, "prompt_en": "Extract the microphone from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的麦克风,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1635, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1635/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/724/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/724/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1635/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1635/loser.png", "save_id": 724, "prompt_en": "Extract the blue jumpsuit from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的蓝色连体衣,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1636, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/172.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1636/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/727/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/727/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1636/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1636/loser.png", "save_id": 727, "prompt_en": "Extract the lunar rover from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的月球车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1637, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/293.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1637/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/735/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/735/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1637/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1637/loser.png", "save_id": 735, "prompt_en": "Extract the balls in the image that are smiling, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中正在微笑的小球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1638, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/409.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1638/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/751/15.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part1/Extract/751/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1638/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1638/loser.png", "save_id": 751, "prompt_en": "Extract the Snoopy figurine wearing a graduation cap and the “Class of 2026” sign from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中戴学士帽的史努比公仔及“Class of 2026”牌子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1639, "sampling_model": "Bagel-Think", "source_image_ori": "Part3/Text_en/6.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1639/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1746/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1746/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1639/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1639/loser.png", "save_id": 1746, "prompt_en": "Change the title 'OCEAN' to 'DESERT'.", "prompt_cn": "将标题“OCEAN”更改为“DESERT”。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1640, "sampling_model": "Bagel-Think", "source_image_ori": "Part3/Text_en/8.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1640/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1749/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1749/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1640/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1640/loser.png", "save_id": 1749, "prompt_en": "Remove the large yellow English text and numbers “5 STARS” at the lower left corner.", "prompt_cn": "移除左下角大号的黄色英文数字与单词“5 STARS”", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1641, "sampling_model": "Bagel-Think", "source_image_ori": "Part3/Text_en/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1641/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1756/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1756/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1641/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1641/tie_2.png", "save_id": 1756, "prompt_en": "Add the light brown English phrase 'Music for the soul' in the lower-left corner.", "prompt_cn": "在左下角添加浅棕色英文短句“Music for the soul”。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1642, "sampling_model": "Bagel-Think", "source_image_ori": "Part3/Text_en/23.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1642/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1767/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Flux_Kontext_num_inference_steps_21_CPS/en/Part6/Visual_Text_EN/1767/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1642/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1642/loser.png", "save_id": 1767, "prompt_en": "Remove the prominent white “NEW ARRIVAL” English letters from the sky area above.", "prompt_cn": "移除上方天空区域中显眼的白色“NEW ARRIVAL”英文字母", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1363, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1363/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/1/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/1/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1363/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1363/tie_2.png", "save_id": 1, "prompt_en": "Add an Adidas logo to the side of the white truck box.", "prompt_cn": "在卡车侧面的白色货厢上添加一个阿迪达斯标志。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1364, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1364/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/10/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/10/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1364/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1364/tie_2.png", "save_id": 10, "prompt_en": "Add a pink balloon to the panda’s hand on the far right.", "prompt_cn": "在最右边的熊猫手上加入一个粉色的气球。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1365, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1365/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/18/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/18/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1365/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1365/loser.png", "save_id": 18, "prompt_en": "Add a steaming ceramic mug of tea to the right of the book on the checkered tablecloth.", "prompt_cn": "在格子桌布上的书右边添加一个冒着热气的陶瓷茶杯。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1366, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/21.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1366/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/21/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/21/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1366/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1366/loser.png", "save_id": 21, "prompt_en": "Place a small coffee table on the 
rug in front of the sofa.", "prompt_cn": "在沙发前面的地毯上放一个小茶几。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1367, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1367/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/28/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/28/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1367/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1367/loser.png", "save_id": 28, "prompt_en": "Add an ice bucket for champagne on the sand beside the left chairs", "prompt_cn": "在左侧椅子旁的沙滩上放一个香槟桶", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1368, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1368/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/34/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/34/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1368/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1368/tie_2.png", "save_id": 34, "prompt_en": "Replace the bedside lamp on the right nightstand with an alarm clock.", "prompt_cn": "将右侧床头柜的台灯换为一个闹钟。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, 
"hints": "" }, { "idx": 1369, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/40.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1369/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/42/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/42/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1369/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1369/loser.png", "save_id": 42, "prompt_en": "Have the woman without a bag hold a cup of Mixue Lemonade.", "prompt_cn": "让没拿包的女士手里拿着一杯蜜雪冰城的柠檬水。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1370, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/43.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1370/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/45/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/45/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1370/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1370/loser.png", "save_id": 45, "prompt_en": "Add a white t-shirt to the man sitting on the block.", "prompt_cn": "给坐在石块上的男生穿上一件白色的T恤。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1371, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/48.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1371/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/50/8.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/50/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1371/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1371/loser.png", "save_id": 50, "prompt_en": "Add a boy behind this sled.", "prompt_cn": "给这个雪橇后面加入一个男生。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1372, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/49.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1372/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/51/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/51/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1372/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1372/loser.png", "save_id": 51, "prompt_en": "Add a small silver metal trash bin in the bottom right corner.", "prompt_cn": "在右下角添加一个银色金属垃圾桶。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1373, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/57.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1373/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/58/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/58/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1373/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1373/loser.png", "save_id": 58, "prompt_en": "Place an open silver Apple MacBook in the center of the table", "prompt_cn": "在桌子中央放一台展开的银色苹果 
MacBook ", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1374, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1374/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/60/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/60/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1374/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1374/loser.png", "save_id": 60, "prompt_en": "Place a Los Angeles Lakers LeBron James jersey on the yellow lounge chair.", "prompt_cn": "在黄色躺椅上放一件湖人队勒布朗·詹姆斯的球衣。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1375, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/68.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1375/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/68/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/68/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1375/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1375/loser.png", "save_id": 68, "prompt_en": "Add a pair of white Nike Air Force 1 sneakers on the desk.", "prompt_cn": "在桌子上加入一双 Nike Air Force 1 白色球鞋。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 
1376, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/104.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1376/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/75/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/75/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1376/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1376/loser.png", "save_id": 75, "prompt_en": "Add a blue-and-white WeChat QR code stand above the sponges.", "prompt_cn": "在海绵上方空白处添加一个蓝白相间的微信二维码立牌。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1377, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/256.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1377/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/85/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/85/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1377/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1377/loser.png", "save_id": 85, "prompt_en": "Place a road sign on the grassy area to the left of the road.", "prompt_cn": "在道路旁左侧草地放置一个路牌", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1378, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/381.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1378/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/97/11.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/97/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1378/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1378/loser.png", "save_id": 97, "prompt_en": "Add a small, red knitted beanie with a pom-pom onto the cat's head.", "prompt_cn": "在猫的头上加一顶带有白色绒球的小型红色针织帽", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1379, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/405.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1379/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/103/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/103/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1379/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1379/loser.png", "save_id": 103, "prompt_en": "Add an origami crane made of tin foil on the seat.", "prompt_cn": "在座椅上加入一个锡纸折成的千纸鹤。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1380, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/418.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1380/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/104/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/104/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1380/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1380/loser.png", "save_id": 104, "prompt_en": "Add a passenger airplane flying in the sky 
above the embankment.", "prompt_cn": "在河堤上方的天空中添加一架正在飞行的客机", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1381, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/3.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1381/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/114/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/114/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1381/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1381/loser.png", "save_id": 114, "prompt_en": "Add another identical hot air balloon in the lower left area.", "prompt_cn": "在图像的左下区域添加另一个相同的热气球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1382, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1382/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/119/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/119/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1382/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1382/loser.png", "save_id": 119, "prompt_en": "Add an identical burger next to the existing one.", "prompt_cn": "在现在的汉堡旁边加入一个和现在汉堡一模一样的汉堡。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" 
}, "hints": "" }, { "idx": 1383, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/10.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1383/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/121/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/121/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1383/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1383/loser.png", "save_id": 121, "prompt_en": "Place a pillow identical to the one on the left side of the sofa in the right corner of the sofa.", "prompt_cn": "在沙发右侧角落放置一个和沙发左侧相同的抱枕。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1384, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1384/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/122/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/122/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1384/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1384/loser.png", "save_id": 122, "prompt_en": "Add another identical smartphone next to the first one.", "prompt_cn": "在第一部智能手机旁边添加另一部相同的智能手机。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1385, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/11.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1385/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/122/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/122/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1385/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1385/loser.png", "save_id": 122, "prompt_en": "Add another identical smartphone next to the first one.", "prompt_cn": "在第一部智能手机旁边添加另一部相同的智能手机。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1386, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1386/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/124/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/124/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1386/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1386/loser.png", "save_id": 124, "prompt_en": "Add another golden retriever sitting beside the first one.", "prompt_cn": "在第一只金毛猎犬旁边添加另一只坐着的金毛猎犬。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1387, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1387/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/127/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/127/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1387/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1387/loser.png", "save_id": 127, "prompt_en": "Hang another identical red lantern next to it.", "prompt_cn": "在它旁边再挂一个一模一样的红灯笼。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1388, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/17.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1388/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/128/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/128/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1388/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1388/loser.png", "save_id": 128, "prompt_en": "Build another identical snowman next to the first one.", "prompt_cn": "在第一个雪人旁边再堆一个相同的雪人。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1389, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/19.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1389/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/130/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/130/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1389/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1389/loser.png", "save_id": 130, "prompt_en": "Duplicate an identical Pikachu next to the existing one.", "prompt_cn": "在现在皮卡丘的旁边复制一个相同的皮卡丘。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1390, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Add_Copy/20.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1390/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/131/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/131/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1390/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1390/loser.png", "save_id": 131, "prompt_en": "Copy an identical game console next to the current one", "prompt_cn": "在现在游戏机的旁边复制一个相同的游戏机。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1391, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1391/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/900/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/900/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1391/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1391/loser.png", "save_id": 900, "prompt_en": "Make the Minion hold a bouquet of flowers.", "prompt_cn": "让小黄人拿着一束花。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1392, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/5.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1392/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/901/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/901/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1392/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1392/loser.png", "save_id": 901, "prompt_en": "Make the boy look like he is talking on the phone.", "prompt_cn": "让男孩看起来好像正在打电话。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1393, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/5.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1393/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/901/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/901/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1393/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1393/loser.png", "save_id": 901, "prompt_en": "Make the boy look like he is talking on the phone.", "prompt_cn": "让男孩看起来好像正在打电话。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1394, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/8.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1394/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/904/1.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/904/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1394/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1394/tie_2.png", "save_id": 904, "prompt_en": "Have the dog place its paws on the computer and work hard.", "prompt_cn": "让这只狗把爪子放在电脑上,看起来在努力工作。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1395, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1395/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/909/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/909/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1395/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1395/loser.png", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1396, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/19.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1396/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/915/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/915/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1396/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1396/loser.png", "save_id": 915, "prompt_en": "Have the girl pick up and hold the ball in front of her.", "prompt_cn": "让女孩抱起她面前的球。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1397, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1397/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/922/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/922/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1397/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1397/loser.png", "save_id": 922, "prompt_en": "Change the fist into an open palm.", "prompt_cn": "将握拳变为张开的手掌。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1398, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/31.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1398/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/926/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/926/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1398/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1398/loser.png", "save_id": 926, "prompt_en": "Adjust the girl’s pose so that she is looking straight ahead.", "prompt_cn": "让这个女生向前看.", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1399, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/33.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1399/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/928/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/928/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1399/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1399/tie_2.png", "save_id": 928, "prompt_en": "Make the girl look up at the sky.", "prompt_cn": "让女孩抬头仰望天空。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1400, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1400/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/929/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/929/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1400/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1400/loser.png", "save_id": 929, "prompt_en": "Have the man wave goodbye to his friend.", "prompt_cn": "让这位男士挥手向朋友告别。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, 
{ "idx": 1401, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1401/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/929/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/929/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1401/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1401/loser.png", "save_id": 929, "prompt_en": "Have the man wave goodbye to his friend.", "prompt_cn": "让这位男士挥手向朋友告别。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1402, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/35.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1402/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/930/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/930/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1402/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1402/loser.png", "save_id": 930, "prompt_en": "Make the woman do a split on the mat.", "prompt_cn": "让这位女性在垫子上做一个劈叉动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1403, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/35.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1403/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/930/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/930/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1403/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1403/loser.png", "save_id": 930, "prompt_en": "Make the woman do a split on the mat.", "prompt_cn": "让这位女性在垫子上做一个劈叉动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1404, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/38.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1404/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/933/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/933/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1404/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1404/loser.png", "save_id": 933, "prompt_en": "Make the girl in the act of bending down to pick up the toy.", "prompt_cn": "让这个女孩正在弯腰捡地上的玩具。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1405, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/41.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1405/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/936/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/936/6.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1405/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1405/loser.png", "save_id": 936, "prompt_en": "Make the motorcycle do a wheelie.", "prompt_cn": "让这辆摩托车做一个翘头动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1406, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/45.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1406/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/940/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/940/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1406/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1406/loser.png", "save_id": 940, "prompt_en": "Have the dog lie down on the grass.", "prompt_cn": "让这只狗卧在草地上。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1407, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/54.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1407/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/951/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/951/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1407/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1407/loser.png", "save_id": 951, "prompt_en": "Have the girl spread her arms.", "prompt_cn": "让这个女生张开双臂。", "label": 
"preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1408, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/58.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1408/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/953/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/953/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1408/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1408/loser.png", "save_id": 953, "prompt_en": "Have the boy give a thumbs-up with his right hand.", "prompt_cn": "让这个男孩用右手竖起大拇指。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1409, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/61.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1409/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/956/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/956/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1409/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1409/loser.png", "save_id": 956, "prompt_en": "Make the cat stretch.", "prompt_cn": "让这只猫看起来正在伸展身体。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1410, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Action/62.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1410/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/957/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Action/957/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1410/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1410/loser.png", "save_id": 957, "prompt_en": "Perform a kickflip on a skateboard.", "prompt_cn": "让这个人在滑板上做一个 kickflip 动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1411, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1411/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1236/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1236/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1411/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1411/loser.png", "save_id": 1236, "prompt_en": "What will happen if you cut the kite’s tether line?", "prompt_cn": "如果你剪断风筝的系绳,会发生什么?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1412, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/13.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1412/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1239/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1239/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1412/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1412/tie_2.png", "save_id": 1239, "prompt_en": "What will happen if the hamburger is bitten into?", "prompt_cn": "展示有人咬了一口之后,这个汉堡变成什么样子。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1413, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1413/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1244/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1244/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1413/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1413/loser.png", "save_id": 1244, "prompt_en": "What will happen if you click the red circle in the top-left corner, shown on the monitor display?", "prompt_cn": "如果你点击显示器屏幕左上角显示的红色圆圈,会发生什么?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1414, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1414/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1255/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1255/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1414/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1414/loser.png", "save_id": 1255, "prompt_en": "What will happen if you turn off the bathroom’s water supply?", "prompt_cn": "展示当你关闭浴室的供水时会发生什么。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1415, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1415/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1262/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1262/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1415/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1415/loser.png", "save_id": 1262, "prompt_en": "What will happen when the rice is fully cooked?", "prompt_cn": "当米饭完全煮熟时会发生什么?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1416, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/37.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1416/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1262/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1262/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1416/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1416/loser.png", "save_id": 1262, "prompt_en": "What will happen when the rice is fully cooked?", "prompt_cn": "当米饭完全煮熟时会发生什么?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1417, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/41.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1417/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1266/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1266/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1417/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1417/loser.png", "save_id": 1266, "prompt_en": "What will happen if the candle goes out?", "prompt_cn": "展示如果蜡烛熄灭,这个场景会发生什么变化。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1418, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/47.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1418/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1270/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1270/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1418/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1418/loser.png", "save_id": 1270, "prompt_en": "What will happen if the little boy slowly lets go of the balloon?", "prompt_cn": "如果小男孩缓缓的松开气球会发生什么?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1419, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/52.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1419/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1275/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1275/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1419/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1419/loser.png", "save_id": 1275, "prompt_en": "What will happen if you unplug the power cord from the socket?", "prompt_cn": "如果你从插座上拔下电源线会发生什么?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1420, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/54.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1420/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1277/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1277/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1420/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1420/loser.png", "save_id": 1277, "prompt_en": "What will happen if you place a dry 
paper towel over the puddle?", "prompt_cn": "如果你把一张干的纸巾放在这个水坑上,会发生什么?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1421, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/54.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1421/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1277/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1277/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1421/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1421/loser.png", "save_id": 1277, "prompt_en": "What will happen if you place a dry paper towel over the puddle?", "prompt_cn": "如果你把一张干的纸巾放在这个水坑上,会发生什么?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1422, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/72.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1422/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1293/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1293/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1422/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1422/loser.png", "save_id": 1293, "prompt_en": "Show the gift after it has been opened.", "prompt_cn": "展示礼物在被打开之后的样子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1423, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/74.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1423/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1295/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1295/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1423/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1423/loser.png", "save_id": 1295, "prompt_en": "What happens when water is poured onto the paper shown in the image, making it wet?", "prompt_cn": "如果将水倒在了图中的纸上并将纸弄湿会发生什么?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1424, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/77.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1424/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1296/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1296/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1424/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1424/loser.png", "save_id": 1296, "prompt_en": "Show the skin shortly after a mosquito bite.", "prompt_cn": "展示皮肤在刚被蚊子叮咬后不久的样子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": 
{ "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1425, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Casual_Reason/89.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1425/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1306/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Casual_Aware/1306/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1425/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1425/loser.png", "save_id": 1306, "prompt_en": "Show the lake immediately after a stone is thrown in.", "prompt_cn": "展示在一块石头被扔进湖里之后立刻的湖面。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1426, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/81.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1426/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/829/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/829/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1426/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1426/tie_2.png", "save_id": 829, "prompt_en": "Keep the dog unchanged and change the background to the interior of a library filled with bookshelves.", "prompt_cn": "保持狗不变,将背景改为书架林立的图书馆内部。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1427, "sampling_model": "Bagel-Think", 
"source_image_ori": "Part1/image/98.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1427/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/832/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/832/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1427/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1427/tie_2.png", "save_id": 832, "prompt_en": "Keep the bicycle, flowers, and watering can unchanged, and change the background to a sandy beach with the ocean and blue sky.", "prompt_cn": "保持自行车、花朵和浇水壶不变,将背景改为有海浪和蓝天的沙滩。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1428, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/105.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1428/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/833/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/833/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1428/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1428/tie_2.png", "save_id": 833, "prompt_en": "Keep the fruits and bowl unchanged, and change the background to an indoor kitchen countertop with cabinets.", "prompt_cn": "保持水果和碗不变,将背景改为室内厨房料理台和橱柜。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1429, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/158.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1429/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/843/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/843/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1429/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1429/tie_2.png", "save_id": 843, "prompt_en": "Keep the lighthouse unchanged and change the background to a busy modern harbor with container ships and cranes.", "prompt_cn": "保持灯塔不变,将背景改为有集装箱船和起重机的繁忙现代港口。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1430, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/184.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1430/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/845/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/845/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1430/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1430/loser.png", "save_id": 845, "prompt_en": "Keep the elderly man and the heart in the sand unchanged, and change the background to the lunar surface under a starry sky.", "prompt_cn": "保持老人和沙地心形不变,将背景改为星空下的月球表面。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1431, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/381.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1431/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/853/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/853/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1431/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1431/loser.png", "save_id": 853, "prompt_en": "Change background to a sunny forest clearing filled with wildflowers.", "prompt_cn": "将背景改为有野花的阳光森林空地", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1432, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/381.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1432/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/853/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/853/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1432/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1432/loser.png", "save_id": 853, "prompt_en": "Change background to a sunny forest clearing filled with wildflowers.", "prompt_cn": "将背景改为有野花的阳光森林空地", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1433, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/386.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1433/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/857/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/857/10.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1433/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1433/loser.png", "save_id": 857, "prompt_en": "Replace the background of the school bus and the children with a busy city street at night.", "prompt_cn": "将校车和孩子的背景替换为夜晚繁华城市街道。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1434, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/398.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1434/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/864/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/864/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1434/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1434/loser.png", "save_id": 864, "prompt_en": "Move the astronaut to a golden beach at sunset with gentle ocean waves.", "prompt_cn": "将宇航员移到日落时分的金色沙滩上", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1435, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/407.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1435/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/872/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/872/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1435/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1435/loser.png", "save_id": 872, "prompt_en": "Keep the Doraemon figurine 
and its “STAND BY ME” base unchanged, and change the sunlit Japanese-style room and wooden desk background in the image to a futuristic 23rd-century cityscape.", "prompt_cn": "保持哆啦A梦手办及其“STAND BY ME”底座不变,将图片中充满阳光的日式房间和木制书桌的背景更改为一个充满未来感的23世纪城市景观。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1436, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/412.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1436/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/876/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/876/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1436/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1436/loser.png", "save_id": 876, "prompt_en": "Keep the skateboarding LEGO Stormtrooper and its gear unchanged, and change the shopping mall interior background in the image to a sunny beach.", "prompt_cn": "保持正在滑板的乐高冲锋队员及其装备不变,将图片中商场内部的背景更改为一个阳光明媚的海滩。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1437, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/425.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1437/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/885/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/885/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1437/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1437/loser.png", 
"save_id": 885, "prompt_en": "Keep SpongeBob unchanged, and change the indoor scene to a sunny beach with the sea.", "prompt_cn": "保持海绵宝宝不变,将室内场景改成阳光明媚的沙滩和大海。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1438, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/431.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1438/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/888/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Background/888/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1438/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1438/loser.png", "save_id": 888, "prompt_en": "Keep the person, guitar, and table unchanged, and change the background to a daytime forest campsite.", "prompt_cn": "保持人物、吉他和桌子不变,将背景改为白天的森林露营地。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1439, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1439/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/774/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/774/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1439/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1439/loser.png", "save_id": 774, "prompt_en": "Make the armchair made of brown leather.", "prompt_cn": "将扶手椅的材质改为棕色皮革。", "label": "preference", "dimension": "IF", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1440, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1440/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/777/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/777/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1440/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1440/loser.png", "save_id": 777, "prompt_en": "Make the tablecloth silk.", "prompt_cn": "将桌布的材质改为丝绸。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1441, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1441/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/781/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/781/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1441/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1441/tie_2.png", "save_id": 781, "prompt_en": "Change the carpet to woven velvet.", "prompt_cn": "把地毯变成编织丝绒材质。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1442, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/15.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1442/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/785/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/785/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1442/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1442/loser.png", "save_id": 785, "prompt_en": "Change the bedside table to pine wood.", "prompt_cn": "将床头柜改成松木的。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1443, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1443/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/786/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/786/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1443/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1443/loser.png", "save_id": 786, "prompt_en": "Turn the table into a metal one.", "prompt_cn": "将这张桌子变成一张金属桌子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1444, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1444/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/795/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/795/10.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1444/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1444/loser.png", "save_id": 795, "prompt_en": "Change the handbag to canvas.", "prompt_cn": "把手提包改成帆布材质。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1445, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/45.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1445/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/807/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/807/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1445/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1445/loser.png", "save_id": 807, "prompt_en": "Change the teapot to porcelain.", "prompt_cn": "将茶壶更换为瓷质茶壶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1446, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Change_material/51.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1446/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/812/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_Material/812/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1446/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1446/loser.png", "save_id": 812, "prompt_en": "Change the elephant to jade.", "prompt_cn": "将这只大象修改为一只玉制大象。", "label": "preference", "dimension": "VC", "system_prompt_name": { 
"IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1447, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/121.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1447/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/552/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/552/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1447/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1447/loser.png", "save_id": 552, "prompt_en": "Change the jockey’s red helmet on the leading horse to bright neon green.", "prompt_cn": "将前方赛马骑手的红色头盔改为鲜亮的荧光绿色。\n", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1448, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/177.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1448/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/563/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/563/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1448/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1448/loser.png", "save_id": 563, "prompt_en": "Change the color of the shoes to sky blue.", "prompt_cn": "将鞋子的颜色改为天空蓝。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", 
"VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1449, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/241.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1449/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/570/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/570/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1449/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1449/loser.png", "save_id": 570, "prompt_en": "Change the black sedan on the right to a bright solid sky blue.", "prompt_cn": "将右侧黑色轿车车身改成亮丽的天蓝色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1450, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/251.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1450/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/574/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/574/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1450/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1450/loser.png", "save_id": 574, "prompt_en": "Change the toothbrush handle to a solid, vivid sapphire blue color.", "prompt_cn": "将牙刷刷柄改成纯正的宝石蓝色", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1451, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/257.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1451/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/578/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/578/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1451/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1451/loser.png", "save_id": 578, "prompt_en": "Change the canvas bag in the image to light blue.", "prompt_cn": "将图中的帆布包改为淡蓝色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1452, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/264.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1452/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/585/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/585/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1452/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1452/loser.png", "save_id": 585, "prompt_en": "Turn the painter’s T‑shirt color to black.", "prompt_cn": "把画工的T恤颜色改成黑色。\n", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1453, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/267.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1453/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/587/6.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/587/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1453/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1453/loser.png", "save_id": 587, "prompt_en": "Change the color of the wall to a clean solid white.", "prompt_cn": "将墙面颜色改成纯净的白色", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1454, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/270.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1454/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/589/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/589/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1454/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1454/loser.png", "save_id": 589, "prompt_en": "Change the bookshelf color from terracotta to matte black.", "prompt_cn": "把书架从砖红色改为哑光黑。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1455, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/274.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1455/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/591/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/591/4.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1455/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1455/loser.png", "save_id": 591, "prompt_en": "Scale down the robot on the left to be the same size as the cup next to it.", "prompt_cn": "将左边的机器人缩小到和旁边的杯子一样大。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1456, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/274.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1456/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/591/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/591/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1456/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1456/loser.png", "save_id": 591, "prompt_en": "Scale down the robot on the left to be the same size as the cup next to it.", "prompt_cn": "将左边的机器人缩小到和旁边的杯子一样大。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1457, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/289.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1457/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/601/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/601/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1457/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1457/loser.png", "save_id": 601, "prompt_en": "Change the helmet in the image to 
white.", "prompt_cn": "将图中的头盔变为白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1458, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/441.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1458/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/613/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/613/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1458/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1458/loser.png", "save_id": 613, "prompt_en": "Enlarge the brush in the image to twice its original size.", "prompt_cn": "将图中的刷子的尺寸扩大一倍。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1459, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/441.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1459/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/613/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Change_color_size/613/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1459/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1459/loser.png", "save_id": 613, "prompt_en": "Enlarge the brush in the image to twice its original size.", "prompt_cn": "将图中的刷子的尺寸扩大一倍。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1460, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1460/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1180/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1180/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1460/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1460/loser.png", "save_id": 1180, "prompt_en": "Make the cat look very fierce.”", "prompt_cn": "让这只猫看起来非常凶猛。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1461, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1461/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1181/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1181/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1461/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1461/loser.png", "save_id": 1181, "prompt_en": "Make the boy sink into deep thought.", "prompt_cn": "让这个男孩看起来正在深思。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1462, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/4.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1462/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1181/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1181/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1462/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1462/loser.png", "save_id": 1181, "prompt_en": "Make the boy sink into deep thought.", "prompt_cn": "让这个男孩看起来正在深思。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1463, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1463/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1189/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1189/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1463/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1463/loser.png", "save_id": 1189, "prompt_en": "Make her look disappointed and frowning.", "prompt_cn": "让她看起来很失望,并且皱着眉头。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1464, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1464/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1192/0.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1192/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1464/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1464/loser.png", "save_id": 1192, "prompt_en": "Make the superhero look defeated and sad.", "prompt_cn": "让这位超级英雄看起来被打败并且很悲伤。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1465, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/26.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1465/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1195/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1195/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1465/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1465/loser.png", "save_id": 1195, "prompt_en": "Make her look angry and scolding.", "prompt_cn": "让她看起来很生气,好像正在责骂某个人。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1466, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/26.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1466/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1195/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1195/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1466/winner.png", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1466/loser.png", "save_id": 1195, "prompt_en": "Make her look angry and scolding.", "prompt_cn": "让她看起来很生气,好像正在责骂某个人。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1467, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/28.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1467/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1196/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1196/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1467/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1467/tie_2.png", "save_id": 1196, "prompt_en": "Make him look annoyed and frowning.", "prompt_cn": "让他看起来很恼火,并露出皱眉的表情。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1468, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1468/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1200/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1200/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1468/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1468/loser.png", "save_id": 1200, "prompt_en": "Make her look surprised by what she is reading.", "prompt_cn": 
"让她看起来对自己正在阅读的内容感到惊讶。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1469, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/36.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1469/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1202/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1202/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1469/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1469/loser.png", "save_id": 1202, "prompt_en": "Make him look terrified.", "prompt_cn": "让他看起来非常害怕。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1470, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/51.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1470/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1213/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1213/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1470/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1470/tie_2.png", "save_id": 1213, "prompt_en": "Make the student yawning.", "prompt_cn": "让这位学生打哈欠。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { 
"IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1471, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/51.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1471/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1213/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1213/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1471/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1471/tie_2.png", "save_id": 1213, "prompt_en": "Make the student yawning.", "prompt_cn": "让这位学生打哈欠。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1472, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/57.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1472/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1217/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1217/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1472/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1472/loser.png", "save_id": 1217, "prompt_en": "Make him look serious and thinking.", "prompt_cn": "让他看起来很严肃,并且陷入深思之中。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1473, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/60.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1473/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1218/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1218/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1473/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1473/loser.png", "save_id": 1218, "prompt_en": "Make him look meditated and calm.", "prompt_cn": "让他看起来处于冥想状态并且很平静。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1474, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/62.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1474/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1220/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1220/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1474/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1474/tie_2.png", "save_id": 1220, "prompt_en": "Make him look utterly confused, scratching his head.", "prompt_cn": "让他看起来极度困惑,并且正在挠头。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1475, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/63.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1475/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1221/1.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1221/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1475/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1475/loser.png", "save_id": 1221, "prompt_en": "Make the dog tilt its head to the side in confusion.", "prompt_cn": "让这只狗把头歪向一侧,就好像它很困惑一样。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1476, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/66.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1476/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1223/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1223/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1476/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1476/loser.png", "save_id": 1223, "prompt_en": "Make this little boy look lost in thought.", "prompt_cn": "让这个小男孩正在发呆。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1477, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/emotion_change/72.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1477/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1225/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Emotion_Change/1225/4.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1477/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1477/loser.png", "save_id": 1225, "prompt_en": "Make him look very impatient.", "prompt_cn": "让他看起来很不耐烦。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1478, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1478/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/692/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/692/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1478/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1478/loser.png", "save_id": 692, "prompt_en": "Extract the airplane model from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的飞机模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1479, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1479/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/693/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/693/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1479/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1479/loser.png", "save_id": 693, "prompt_en": 
"Extract the robot figure on the left, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出左边的机器人公仔,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1480, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/16.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1480/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/693/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/693/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1480/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1480/loser.png", "save_id": 693, "prompt_en": "Extract the robot figure on the left, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出左边的机器人公仔,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1481, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1481/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/694/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/694/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1481/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1481/loser.png", "save_id": 694, "prompt_en": "Extract the bench from the image, keeping its position, orientation, and pose 
unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的长椅,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1482, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/84.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1482/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/701/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/701/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1482/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1482/loser.png", "save_id": 701, "prompt_en": "Extract the yellow motorcycle from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出黄色的摩托车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1483, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/85.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1483/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/702/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/702/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1483/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1483/loser.png", "save_id": 702, "prompt_en": "Extract the blue Easter egg and its egg holder in the center, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", 
"prompt_cn": "提取出中间的蓝色彩蛋和蛋托,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1484, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/88.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1484/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/703/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/703/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1484/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1484/loser.png", "save_id": 703, "prompt_en": "Extract the black 8-ball, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出那颗黑色的8号台球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1485, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/88.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1485/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/703/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/703/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1485/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1485/loser.png", "save_id": 703, "prompt_en": "Extract the black 8-ball, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出那颗黑色的8号台球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", 
"system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1486, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1486/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/705/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/705/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1486/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1486/loser.png", "save_id": 705, "prompt_en": "Extract the soccer ball from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的足球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1487, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/100.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1487/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/707/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/707/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1487/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1487/loser.png", "save_id": 707, "prompt_en": "Extract the larger pigeon, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出较大的鸽子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1488, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/110.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1488/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/711/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/711/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1488/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1488/loser.png", "save_id": 711, "prompt_en": "Extract the mask from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的口罩,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1489, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/110.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1489/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/711/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/711/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1489/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1489/loser.png", "save_id": 711, "prompt_en": "Extract the mask from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的口罩,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" 
}, "hints": "" }, { "idx": 1490, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/163.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1490/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/720/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/720/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1490/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1490/loser.png", "save_id": 720, "prompt_en": "Extract the model sailboat from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的帆船模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1491, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/163.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1491/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/720/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/720/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1491/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1491/loser.png", "save_id": 720, "prompt_en": "Extract the model sailboat from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的帆船模型,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1492, "sampling_model": "Bagel-Think", 
"source_image_ori": "Part1/image/167.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1492/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/721/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/721/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1492/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1492/loser.png", "save_id": 721, "prompt_en": "Extract the two people holding hands from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中手拉手的两个人,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1493, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/167.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1493/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/721/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/721/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1493/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1493/loser.png", "save_id": 721, "prompt_en": "Extract the two people holding hands from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中手拉手的两个人,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1494, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/169.jpg", 
"source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1494/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/723/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/723/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1494/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1494/loser.png", "save_id": 723, "prompt_en": "Extract the snowman toy from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的雪人玩偶,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1495, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/170.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1495/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/724/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/724/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1495/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1495/loser.png", "save_id": 724, "prompt_en": "Extract the blue jumpsuit from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的蓝色连体衣,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1496, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/172.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1496/source.png", 
"winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/727/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/727/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1496/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1496/loser.png", "save_id": 727, "prompt_en": "Extract the lunar rover from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的月球车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1497, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/172.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1497/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/727/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/727/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1497/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1497/loser.png", "save_id": 727, "prompt_en": "Extract the lunar rover from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的月球车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1498, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/293.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1498/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/735/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/735/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1498/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1498/loser.png", "save_id": 735, "prompt_en": "Extract the balls in the image that are smiling, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中正在微笑的小球,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1499, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/336.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1499/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/743/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/743/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1499/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1499/loser.png", "save_id": 743, "prompt_en": "Extract the two heart-shaped cookies from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的两个心形饼干,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1500, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/337.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1500/source.png", "winner_src":
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/744/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/744/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1500/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1500/loser.png", "save_id": 744, "prompt_en": "Extract the flowers and the basket from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的花和篮子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1501, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/351.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1501/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/746/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/746/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1501/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1501/loser.png", "save_id": 746, "prompt_en": "Extract the largest pumpkin from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出最大的南瓜,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1502, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/409.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1502/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/751/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/751/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1502/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1502/tie_2.png", "save_id": 751, "prompt_en": "Extract the Snoopy figurine wearing a graduation cap and the “Class of 2026” sign from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中戴学士帽的史努比公仔及“Class of 2026”牌子,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1503, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/458.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1503/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/758/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/758/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1503/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1503/tie_2.png", "save_id": 758, "prompt_en": "Extract the car from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图像中的这辆车,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1504, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/474.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1504/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/765/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/765/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1504/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1504/loser.png", "save_id": 765, "prompt_en": "Extract the main body of the entire house from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取图像中的整栋房屋主体,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1505, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/495.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1505/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/770/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Extract/770/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1505/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1505/loser.png", "save_id": 770, "prompt_en": "Extract the newspaper from the image, keeping its position, orientation, and pose unchanged, and replace the background with pure white.", "prompt_cn": "提取出图中的报纸,保持其位置、朝向和姿态不变,并将背景替换为纯白色。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1506, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Math_Reason/11.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1506/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Math_Aware/1459/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Math_Aware/1459/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1506/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1506/tie_2.png", "save_id": 1459, "prompt_en": "Increase the cars until the number is double the original.", "prompt_cn": "将汽车的数量增加到原来的两倍。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1507, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Math_Reason/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1507/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Math_Aware/1472/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Math_Aware/1472/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1507/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1507/loser.png", "save_id": 1472, "prompt_en": "Remove two-thirds of the chocolate squares.", "prompt_cn": "删除三分之二的巧克力方块。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1508, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Math_Reason/60.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1508/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Math_Aware/1502/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Math_Aware/1502/10.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1508/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1508/tie_2.png", "save_id": 1502, "prompt_en": "Draw the largest possible circle that fits inside the square.", "prompt_cn": "在正方形内画出尽可能大的圆。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1509, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1509/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/970/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/970/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1509/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1509/loser.png", "save_id": 970, "prompt_en": "Move the chair to the right side of the sofa.", "prompt_cn": "将椅子移动到沙发的右侧。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1510, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1510/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/970/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/970/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1510/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1510/tie_2.png", "save_id": 970, "prompt_en": "Move the chair to the right side of the sofa.", "prompt_cn": 
"将椅子移动到沙发的右侧。", "label": "tie", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1511, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1511/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/975/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/975/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1511/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1511/tie_2.png", "save_id": 975, "prompt_en": "Move the paper clip to the right.", "prompt_cn": "将纸夹移动到右侧。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1512, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1512/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/975/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/975/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1512/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1512/loser.png", "save_id": 975, "prompt_en": "Move the paper clip to the right.", "prompt_cn": "将纸夹移动到右侧。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 1513, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1513/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/975/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/975/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1513/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1513/loser.png", "save_id": 975, "prompt_en": "Move the paper clip to the right.", "prompt_cn": "将纸夹移动到右侧。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1514, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1514/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/980/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/980/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1514/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1514/loser.png", "save_id": 980, "prompt_en": "Move the painting to the right.", "prompt_cn": "将这幅画向右移动。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1515, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1515/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/980/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/980/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1515/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1515/loser.png", "save_id": 980, "prompt_en": "Move the painting to the right.", "prompt_cn": "将这幅画向右移动。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1516, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1516/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/982/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/982/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1516/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1516/tie_2.png", "save_id": 982, "prompt_en": "Move the stack of books onto the nearby bookshelf.", "prompt_cn": "将那叠书移动到附近的书架上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1517, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/22.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1517/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/985/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/985/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1517/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1517/tie_2.png", "save_id": 985, "prompt_en": "Move the cat onto the sofa.", "prompt_cn": "将猫移动到沙发上。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1518, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/23.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1518/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/986/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/986/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1518/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1518/loser.png", "save_id": 986, "prompt_en": "Move the apple onto the white tablecloth.", "prompt_cn": "请将苹果移动到白色的桌布上。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1519, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/33.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1519/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/995/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/995/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1519/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1519/tie_2.png", "save_id": 995, "prompt_en": "Move the red car to the right side of the street.", "prompt_cn": "将红色汽车移动到街道的右侧。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", 
"VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1520, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/45.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1520/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/1002/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/1002/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1520/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1520/loser.png", "save_id": 1002, "prompt_en": "Move the tent to the right side.", "prompt_cn": "将帐篷移动到右侧。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1521, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Move/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1521/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/1029/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/1029/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1521/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1521/loser.png", "save_id": 1029, "prompt_en": "Move the blue sphere to the right of the yellow cylinder.", "prompt_cn": "将蓝色球体移动到黄色圆柱的右边。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1522, "sampling_model": 
"Bagel-Think", "source_image_ori": "Part2/Obj_interaction/3.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1522/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1108/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1108/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1522/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1522/loser.png", "save_id": 1108, "prompt_en": "Have the girl practice lat pulldowns.", "prompt_cn": "让这个女孩做高位下拉动作。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1523, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/3.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1523/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1108/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1108/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1523/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1523/loser.png", "save_id": 1108, "prompt_en": "Have the girl practice lat pulldowns.", "prompt_cn": "让这个女孩做高位下拉动作。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1524, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/4.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1524/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1109/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1109/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1524/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1524/loser.png", "save_id": 1109, "prompt_en": "Have this girl practice seated hip abduction.", "prompt_cn": "让这个女孩做坐姿髋外展训练。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1525, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1525/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1110/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1110/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1525/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1525/loser.png", "save_id": 1110, "prompt_en": "Make the man and woman hug each other tightly.", "prompt_cn": "让这名男子和女子紧紧拥抱在一起。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1526, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/11.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1526/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1115/3.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1115/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1526/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1526/tie_2.png", "save_id": 1115, "prompt_en": "Make the two players high five.", "prompt_cn": "让这两名球员互相击掌。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1527, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1527/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1119/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1119/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1527/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1527/tie_2.png", "save_id": 1119, "prompt_en": "Make the woman perform a barbell squat.", "prompt_cn": "让这名女子进行杠铃深蹲动作。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1528, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1528/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1121/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1121/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1528/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1528/tie_2.png", "save_id": 1121, "prompt_en": "Have the man perform a bench press, lifting the barbell.", "prompt_cn": "让这个男人正在做卧推,举起杠铃。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1529, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1529/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1130/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1130/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1529/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1529/loser.png", "save_id": 1130, "prompt_en": "Make the boy brush his teeth.", "prompt_cn": "让这个男孩刷牙。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1530, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/44.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1530/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1144/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1144/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1530/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1530/tie_2.png", "save_id": 1144, "prompt_en": "Make the student kick the sandbag.", "prompt_cn": "让这名学员踢沙袋。", "label": 
"tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1531, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/44.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1531/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1144/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1144/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1531/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1531/loser.png", "save_id": 1144, "prompt_en": "Make the student kick the sandbag.", "prompt_cn": "让这名学员踢沙袋。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1532, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/75.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1532/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1161/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1161/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1532/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1532/loser.png", "save_id": 1161, "prompt_en": "Make the knight pull the sword out of the stone.", "prompt_cn": "让骑士把剑从石头中拔出来。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1533, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/79.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1533/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1165/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1165/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1533/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1533/tie_2.png", "save_id": 1165, "prompt_en": "Make the weightlifter lift the barbell overhead.", "prompt_cn": "让举重运动员把杠铃举到头顶上方。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1534, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/80.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1534/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1166/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1166/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1534/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1534/tie_2.png", "save_id": 1166, "prompt_en": "Make the person bend down and pick up the hat.", "prompt_cn": "让这个人弯下腰并捡起帽子。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1535, 
"sampling_model": "Bagel-Think", "source_image_ori": "Part2/Obj_interaction/92.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1535/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1172/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1172/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1535/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1535/loser.png", "save_id": 1172, "prompt_en": "Make the red and blue bumper cars collide with each other.", "prompt_cn": "让红色和蓝色的碰碰车彼此相撞。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1536, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1536/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/133/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/133/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1536/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1536/loser.png", "save_id": 133, "prompt_en": "Remove the rainbow from the sky.", "prompt_cn": "移除天空中的彩虹。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1537, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1537/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/137/9.png", 
"loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/137/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1537/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1537/loser.png", "save_id": 137, "prompt_en": "Remove the green plant on the cabinet.", "prompt_cn": "移除柜子上的绿色植物。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1538, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/30.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1538/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/142/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/142/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1538/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1538/loser.png", "save_id": 142, "prompt_en": "Remove the umbrella in the air.", "prompt_cn": "移除空中的雨伞。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1539, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/66.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1539/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/147/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/147/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1539/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1539/loser.png", "save_id": 147, "prompt_en": "Remove the letter in the center of the image.", "prompt_cn": "移除图片中间的信。", "label": 
"preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1540, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/245.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1540/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/155/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/155/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1540/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1540/loser.png", "save_id": 155, "prompt_en": "Remove the small boat from the image.", "prompt_cn": "移除图中的小船。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1541, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/416.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1541/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/177/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/177/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1541/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1541/loser.png", "save_id": 177, "prompt_en": "Remove the largest rubber duck from the image.", "prompt_cn": "移除图中的最大的橡皮鸭。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1542, "sampling_model": "Bagel-Think", "source_image_ori": 
"Part2/Remove_attribute/1.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1542/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/181/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/181/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1542/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1542/tie_2.png", "save_id": 181, "prompt_en": "Remove all red objects from the scene.", "prompt_cn": "从场景中移除所有红色物体。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1543, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Remove_attribute/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1543/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/185/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Remove/185/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1543/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1543/tie_2.png", "save_id": 185, "prompt_en": "Remove all writing instruments.", "prompt_cn": "移除所有书写工具。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1544, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/95.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1544/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/231/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/231/6.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1544/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1544/tie_2.png", "save_id": 231, "prompt_en": "Replace the mug with a DSLR camera.", "prompt_cn": "将杯子替换为一台单反相机。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1545, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/116.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1545/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/239/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/239/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1545/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1545/tie_2.png", "save_id": 239, "prompt_en": "Swap the red heart in the bear’s hands for a flower.", "prompt_cn": "将小熊手里的红色爱心换成一朵花。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1546, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/135.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1546/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/248/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/248/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1546/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1546/loser.png", "save_id": 248, "prompt_en": "Replace the wall clock with a large round mirror.", "prompt_cn": "将挂钟替换为一面大圆镜子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1547, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/137.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1547/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/249/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/249/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1547/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1547/loser.png", "save_id": 249, "prompt_en": "Replace the stuffed bunny on the bed with a large, yellow rubber duck wearing sunglasses.", "prompt_cn": "将床上的毛绒兔子替换为一个戴着太阳镜的、大号黄色橡皮鸭。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1548, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/139.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1548/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/251/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/251/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1548/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1548/tie_2.png", "save_id": 251, "prompt_en": "Change the light purple bedspread to a Pikachu-themed design.", "prompt_cn": "将浅紫色床罩改为皮卡丘主题设计。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1549, "sampling_model": "Bagel-Think", "source_image_ori": 
"Part1/image/161.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1549/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/257/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/257/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1549/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1549/loser.png", "save_id": 257, "prompt_en": "Replace the parrot perched on the branch with a Minions collectible figure.", "prompt_cn": "把这只站在树枝上的鹦鹉换成一个小黄人公仔", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1550, "sampling_model": "Bagel-Think", "source_image_ori": "Part1/image/180.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1550/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/260/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/Replace/260/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1550/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1550/tie_2.png", "save_id": 260, "prompt_en": "Replace the teddy bear in the crib with a Mickey Mouse plush toy.", "prompt_cn": "把婴儿床里的小熊玩偶换成一个米老鼠毛绒公仔", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1551, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_004.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1551/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1312/4.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1312/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1551/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1551/loser.png", "save_id": 1312, "prompt_en": "Show the bread after being left in a damp room for a week.", "prompt_cn": "将面包展示为在潮湿房间里放置一周之后的样子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1552, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_011.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1552/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1318/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1318/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1552/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1552/loser.png", "save_id": 1318, "prompt_en": "Show how her hair would look after not being cut for one year.", "prompt_cn": "如果她一年没剪头发会怎么样?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1553, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_011.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1553/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1318/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1318/6.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1553/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1553/loser.png", "save_id": 1318, "prompt_en": "Show how her hair would look after not being cut for one year.", "prompt_cn": "如果她一年没剪头发会怎么样?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1554, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_013.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1554/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1320/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1320/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1554/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1554/loser.png", "save_id": 1320, "prompt_en": "Show the skin after three weeks of healing.", "prompt_cn": "将皮肤表现为愈合三周后的样子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1555, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_015.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1555/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1322/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1322/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1555/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1555/loser.png", "save_id": 1322, "prompt_en": 
"What happens when this instant noodle is cooked for 20 minutes?", "prompt_cn": "如果这些方便面被煮 20 分钟,会发生什么情况?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1556, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_016.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1556/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1323/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1323/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1556/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1556/tie_2.png", "save_id": 1323, "prompt_en": "Show how this bicycle would look after being abandoned outdoors for ten years.", "prompt_cn": "将这辆自行车展示成在户外被遗弃10年它会变成怎么样?", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1557, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_022.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1557/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1329/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1329/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1557/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1557/loser.png", "save_id": 1329, "prompt_en": "Depict the appearance of this soap after thirty shower uses.", "prompt_cn": "展示这块香皂在经过30次淋浴使用后的样子。", "label": 
"preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1558, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_028.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1558/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1335/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1335/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1558/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1558/loser.png", "save_id": 1335, "prompt_en": "What happens to beer fifteen minutes later?", "prompt_cn": "展示十五分钟后啤酒会发生的变化。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1559, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_032.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1559/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1559/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1559/tie_2.png", "save_id": 1339, "prompt_en": "Show how the milk will look after ten minutes at room temperature.", "prompt_cn": "展示牛奶在常温的房间中十分钟后会变成怎么样?", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": 
"Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1560, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_032.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1560/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1560/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1560/tie_2.png", "save_id": 1339, "prompt_en": "Show how the milk will look after ten minutes at room temperature.", "prompt_cn": "展示牛奶在常温的房间中十分钟后会变成怎么样?", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1561, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_032.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1561/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1561/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1561/tie_2.png", "save_id": 1339, "prompt_en": "Show how the milk will look after ten minutes at room temperature.", "prompt_cn": "展示牛奶在常温的房间中十分钟后会变成怎么样?", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { 
"idx": 1562, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_032.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1562/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1339/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1562/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1562/loser.png", "save_id": 1339, "prompt_en": "Show how the milk will look after ten minutes at room temperature.", "prompt_cn": "展示牛奶在常温的房间中十分钟后会变成怎么样?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1563, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_042.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1563/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1349/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1349/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1563/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1563/loser.png", "save_id": 1349, "prompt_en": "Show the pancake after 6 minutes of cooking.", "prompt_cn": "展示这块煎饼在已经煎了 6 分钟之后的样子。", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1564, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_040.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1564/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1347/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1347/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1564/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1564/loser.png", "save_id": 1347, "prompt_en": "What will the road surface look like after a few days of vehicle traffic?", "prompt_cn": "经过几天车辆通行后,道路表面会是什么样子?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1565, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_046.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1565/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1351/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1351/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1565/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VC/1565/loser.png", "save_id": 1351, "prompt_en": "What will a parked car look like after being covered by snow overnight?", "prompt_cn": "一辆停着的汽车在一夜之间被雪覆盖之后会是什么样子?", "label": "preference", "dimension": "VC", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1566, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_054.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1566/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1358/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1358/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1566/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1566/loser.png", "save_id": 1358, "prompt_en": "How will the cup look after ten minutes?", "prompt_cn": "展示这个杯子在十分钟后会呈现出的样子。", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1567, "sampling_model": "Bagel-Think", "source_image_ori": "Part2/Time_Reason/tr_094.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1567/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1392/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part3/Time_Aware/1392/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1567/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1567/loser.png", "save_id": 1392, "prompt_en": "What does the medicine look like before it's taken?", "prompt_cn": "药没吃前是什么样子?", "label": "preference", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1568, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part1/image/19.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1568/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/ADD/19/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/ADD/19/14.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1568/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1568/loser.png", "save_id": 19, "prompt_en": "Add a happy, jumping golden retriever next to the worker.", "prompt_cn": "在工人旁边添加一只快乐跳跃的金毛犬。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1569, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part1/image/38.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1569/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/ADD/39/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/ADD/39/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1569/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1569/loser.png", "save_id": 39, "prompt_en": "Place a light gray fabric cushion in the middle of the wooden bench.", "prompt_cn": "在长椅中央放一个浅灰色布艺靠垫", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1570, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Action/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1570/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/899/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/899/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1570/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1570/loser.png", "save_id": 899, "prompt_en": "Set the bicycle upright.", "prompt_cn": "将自行车摆放成直立状态。", "label": "preference", "dimension": "VQ", 
"system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1571, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Action/9.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1571/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/905/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/905/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1571/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1571/loser.png", "save_id": 905, "prompt_en": "Position the girl lying on the chair.", "prompt_cn": "让女孩躺在椅子上。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1572, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Action/13.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1572/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/909/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/909/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1572/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1572/loser.png", "save_id": 909, "prompt_en": "Have the girl start doing seated hip abductions.", "prompt_cn": "让女孩开始进行坐姿髋外展动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1573, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Action/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1573/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/911/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/911/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1573/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1573/loser.png", "save_id": 911, "prompt_en": "Make the girl point both of her hands to the left.", "prompt_cn": "让女孩把双手都指向左侧。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1574, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Action/19.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1574/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/915/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Action/915/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1574/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1574/loser.png", "save_id": 915, "prompt_en": "Have the girl pick up and hold the ball in front of her.", "prompt_cn": "让女孩抱起她面前的球。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1575, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part1/image/81.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1575/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Change_Background/829/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Change_Background/829/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1575/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1575/loser.png", "save_id": 829, "prompt_en": "Keep the dog unchanged and change the background to the interior of a library filled with bookshelves.", "prompt_cn": "保持狗不变,将背景改为书架林立的图书馆内部。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1576, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part1/image/382.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1576/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Change_Background/854/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Change_Background/854/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1576/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1576/loser.png", "save_id": 854, "prompt_en": "Change the fisherman’s background to a sunrise scene at a clear alpine lake surrounded by mountains.", "prompt_cn": "将钓鱼者的背景改为群山环绕、湖水清澈的高山湖泊日出场景。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1577, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Change_material/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1577/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Change_Material/792/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Change_Material/792/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1577/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1577/loser.png", "save_id": 792, "prompt_en": "Change the jacket to leather.", "prompt_cn": "将外套改为皮质的。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1578, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Move/9.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1578/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Move/975/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Move/975/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1578/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1578/loser.png", "save_id": 975, "prompt_en": "Move the paper clip to the right.", "prompt_cn": "将纸夹移动到右侧。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1579, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Move/10.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1579/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Move/976/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Move/976/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1579/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1579/loser.png", "save_id": 976, "prompt_en": "Move the cards to the correct positions to complete the puzzle.", "prompt_cn": "将这些拼图块移动到正确的位置以完成拼图。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1580, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Move/11.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1580/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Move/977/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Move/977/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1580/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1580/loser.png", "save_id": 977, "prompt_en": "Move the spoon into the bowl.", "prompt_cn": "将勺子移入碗中。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1581, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Remove_attribute/8.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1581/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Remove/186/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part1/Remove/186/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1581/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1581/loser.png", "save_id": 186, "prompt_en": "Remove all leather furniture.", "prompt_cn": "移除所有皮革家具。", "label": "preference", "dimension": "VQ", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1582, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Obj_interaction/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1582/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1106/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1106/12.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1582/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1582/loser.png", "save_id": 1106, "prompt_en": "Have the boy perform a bicep curl with the dumbbells next to him.", "prompt_cn": "让男孩用他旁边的哑铃做一个二头肌弯举动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1583, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Obj_interaction/2.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1583/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1107/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1107/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1583/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1583/loser.png", "save_id": 1107, "prompt_en": "Have the girl pick up the two dumbbells at her feet and perform a chest press.", "prompt_cn": "让女孩捡起脚边的两个哑铃并进行卧推动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": 
"Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1584, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Obj_interaction/13.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1584/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1117/14.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1117/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1584/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1584/loser.png", "save_id": 1117, "prompt_en": "Have the older brother lift up his younger sister.", "prompt_cn": "让哥哥把妹妹抱起来。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1585, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Obj_interaction/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1585/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1118/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1118/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1585/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1585/loser.png", "save_id": 1118, "prompt_en": "Have the two girls compare the sizes of their left hands.", "prompt_cn": "让这两个女孩在比左手手掌的大小。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", 
"VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1586, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Obj_interaction/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1586/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1119/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1119/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1586/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1586/loser.png", "save_id": 1119, "prompt_en": "Make the woman perform a barbell squat.", "prompt_cn": "让这名女子进行杠铃深蹲动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1587, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Obj_interaction/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1587/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1120/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1120/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1587/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1587/loser.png", "save_id": 1120, "prompt_en": "Make the woman wave the battle ropes vigorously.", "prompt_cn": "让这名女子剧烈地挥动战绳。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1588, "sampling_model": "Qwen-Image-Edit", "source_image_ori": 
"Part2/Obj_interaction/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1588/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1121/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Object_Interaction/1121/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1588/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1588/loser.png", "save_id": 1121, "prompt_en": "Have the man perform a bench press, lifting the barbell.", "prompt_cn": "让这个男人正在做卧推,举起杠铃。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1589, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part2/Swap/14.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1589/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Swap/1050/15.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part2/Swap/1050/14.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1589/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1589/loser.png", "save_id": 1050, "prompt_en": "Swap the poster of LeBron James in the top-left corner with the poster of Kobe Bryant in the bottom-right corner.", "prompt_cn": "将左上角的勒布朗·詹姆斯海报与右下角的科比·布莱恩特海报互换位置。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1590, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part3/Text_cn/1.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1590/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_CN/1599/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_CN/1599/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1590/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1590/loser.png", "save_id": 1599, "prompt_en": "Change the title '江湖往事' to '剑影萍踪'", "prompt_cn": "将标题文字“江湖往事”更改为“剑影萍踪”。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1591, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part3/Text_cn/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1591/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_CN/1604/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_CN/1604/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1591/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1591/loser.png", "save_id": 1604, "prompt_en": "Change the actor name '吴京' to '沈腾'", "prompt_cn": "将演员姓名“吴京”更改为“沈腾”。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1592, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part3/Text_en/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1592/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1739/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1739/15.png", 
"winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1592/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1592/loser.png", "save_id": 1739, "prompt_en": "Erase the blood-red English title text “THE GHOST” from the upper center of the image.", "prompt_cn": "擦除画面上方中央血红色的英文标题“THE GHOST”文字", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1593, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part3/Text_en/15.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1593/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1757/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1757/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1593/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1593/loser.png", "save_id": 1757, "prompt_en": "Remove the text 'SYMPHONY ORCHESTRA'.", "prompt_cn": "移除“SYMPHONY ORCHESTRA” 字体。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1594, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part3/Text_en/18.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1594/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1760/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1760/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1594/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1594/loser.png", "save_id": 1760, "prompt_en": "Erase the large 
curved “HARVEST MOON FOLK FEST” English title text at the top.", "prompt_cn": "擦除画面顶部大字的“HARVEST MOON FOLK FEST”英文标题文本", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1595, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part3/Text_en/21.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1595/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1764/12.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1764/13.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1595/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1595/loser.png", "save_id": 1764, "prompt_en": "Add light-blue, handwritten-style text 'Beach' to the left of the sunglasses on the sand at the bottom of the image.", "prompt_cn": "在画面下方沙滩上太阳镜的左侧,添加浅蓝色手写风格文字‘Beach’。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1596, "sampling_model": "Qwen-Image-Edit", "source_image_ori": "Part3/Text_en/25.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1596/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1770/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Qwen-Image-Edit/en/Part6/Visual_Text_EN/1770/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1596/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1596/loser.png", "save_id": 1770, "prompt_en": "Add smaller glowing white sans-serif text \"New Generation\" above the central \"iPhone 14\" title.", "prompt_cn": 
"在画面顶部中央的“iPhone 14”上方添加小一号白色无衬线发光文字“New Generation”", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1597, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Action/15.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1597/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/911/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/911/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1597/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1597/loser.png", "save_id": 911, "prompt_en": "Make the girl point both of her hands to the left.", "prompt_cn": "让女孩把双手都指向左侧。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1598, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Action/23.jpeg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1598/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/918/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/918/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1598/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1598/loser.png", "save_id": 918, "prompt_en": "Have the girl sit on the ground.", "prompt_cn": "让这个小女孩坐在地上。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": 
"VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1599, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Action/30.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1599/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/925/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/925/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1599/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1599/loser.png", "save_id": 925, "prompt_en": "Have the boy cross his legs.", "prompt_cn": "让这个男生跷起二郎腿。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1600, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Action/31.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1600/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/926/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/926/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1600/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1600/tie_2.png", "save_id": 926, "prompt_en": "Adjust the girl’s pose so that she is looking straight ahead.", "prompt_cn": "让这个女生向前看.", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1601, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Action/35.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1601/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/930/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/930/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1601/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1601/loser.png", "save_id": 930, "prompt_en": "Make the woman do a split on the mat.", "prompt_cn": "让这位女性在垫子上做一个劈叉动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1602, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Action/37.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1602/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/932/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Action/932/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1602/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1602/loser.png", "save_id": 932, "prompt_en": "Make the dancer lift her left leg up to waist height.", "prompt_cn": "让舞者将左腿抬起到腰部高度。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1603, "sampling_model": "OmniGen2", "source_image_ori": "Part1/image/309.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1603/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part6/Complex/311/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part6/Complex/311/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1603/tie_1.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/IF/1603/tie_2.png", "save_id": 311, "prompt_en": "Add a park sign next to the path with the text 'Sunset Research Park', change the bench to white and add a person reading on it, and place a tent on the lawn.", "prompt_cn": "在小路旁添加一块公园指示牌,牌子上写‘Sunset Research Park’,将长椅改为白色并在上面添加一位读书的人,在草坪上加入一个帐篷。", "label": "tie", "dimension": "IF", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1604, "sampling_model": "OmniGen2", "source_image_ori": "Part1/image/309.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1604/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part6/Complex/311/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part6/Complex/311/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1604/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1604/loser.png", "save_id": 311, "prompt_en": "Add a park sign next to the path with the text 'Sunset Research Park', change the bench to white and add a person reading on it, and place a tent on the lawn.", "prompt_cn": "在小路旁添加一块公园指示牌,牌子上写‘Sunset Research Park’,将长椅改为白色并在上面添加一位读书的人,在草坪上加入一个帐篷。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1605, "sampling_model": "OmniGen2", "source_image_ori": "Part1/image/320.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1605/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part6/Complex/318/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part6/Complex/318/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1605/winner.png", 
"loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1605/loser.png", "save_id": 318, "prompt_en": "Remove the shoe that can be worn on the left foot in the image, remove the tennis ball farthest from the camera, change the background to light blue, and write 'Practice' in green chalk-style text above the tennis ball.", "prompt_cn": "移除图中鞋子可以穿在左脚上的鞋,移除离镜头最远的网球,将背景改为淡蓝色,并在网球上方用绿色粉笔风格文字写上‘Practice’。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1606, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Obj_interaction/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1606/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Object_Interaction/1136/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Object_Interaction/1136/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1606/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1606/tie_2.png", "save_id": 1136, "prompt_en": "Make the boy eat the noodles held by the chopsticks.", "prompt_cn": "让男孩吃筷子上的面条。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1607, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Obj_interaction/44.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1607/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Object_Interaction/1144/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Object_Interaction/1144/2.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1607/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1607/loser.png", "save_id": 1144, "prompt_en": "Make the student kick the sandbag.", "prompt_cn": "让这名学员踢沙袋。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1608, "sampling_model": "OmniGen2", "source_image_ori": "Part2/Swap/52.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1608/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Swap/1081/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/OmniGen2/en/Part2/Swap/1081/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1608/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1608/tie_2.png", "save_id": 1081, "prompt_en": "Swap the colors of the tractor and the sports car.", "prompt_cn": "交换拖拉机和跑车的颜色。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1609, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/7.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1609/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/903/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/903/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1609/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1609/loser.png", "save_id": 903, "prompt_en": "Have the girl who is taking the phone photo make a peace sign with her free hand.", "prompt_cn": 
"让正在用手机拍照的女生,用另一只空着的手做出一个“耶”的手势。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1610, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/14.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1610/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/910/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/910/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1610/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1610/tie_2.png", "save_id": 910, "prompt_en": "Change the girl’s pose to a horse stance.", "prompt_cn": "将女孩的姿势改为马步。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1611, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/16.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1611/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/912/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/912/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1611/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1611/loser.png", "save_id": 912, "prompt_en": "Make the girl open her eyes and sit up.", "prompt_cn": "让这个女孩睁开眼睛并坐起来。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", 
"VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1612, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/34.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1612/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/929/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/929/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1612/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1612/loser.png", "save_id": 929, "prompt_en": "Have the man wave goodbye to his friend.", "prompt_cn": "让这位男士挥手向朋友告别。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1613, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/49.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1613/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/944/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/944/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1613/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1613/loser.png", "save_id": 944, "prompt_en": "Have the cat lie down on the lawn.", "prompt_cn": "让这只猫卧在草坪上。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1614, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/50.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1614/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/945/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/945/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1614/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1614/loser.png", "save_id": 945, "prompt_en": "Make the waiter bow.", "prompt_cn": "让服务员鞠躬。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1615, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/51.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1615/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/946/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/946/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1615/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1615/loser.png", "save_id": 946, "prompt_en": "Have the boy perform a powerful, LeBron James–style signature stride.", "prompt_cn": "让这个男孩做出篮球明星勒布朗·詹姆斯标志性的‘霸王步’风格动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1616, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/55.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1616/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/949/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/949/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1616/winner.png", "loser": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1616/loser.png", "save_id": 949, "prompt_en": "Have this boy zip up his jacket.", "prompt_cn": "让这个男生正在拉夹克的拉链。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1617, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/60.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1617/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/955/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/955/2.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1617/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1617/loser.png", "save_id": 955, "prompt_en": "Make her blow the bubble wand.", "prompt_cn": "让她吹泡泡棒。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1618, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/62.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1618/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/957/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/957/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1618/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1618/loser.png", "save_id": 957, "prompt_en": "Perform a kickflip on a skateboard.", "prompt_cn": "让这个人在滑板上做一个 kickflip 动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": 
"Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1619, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/65.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1619/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/960/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/960/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1619/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1619/loser.png", "save_id": 960, "prompt_en": "Have this boy kneeling on one knee.", "prompt_cn": "让这个男生单膝下跪", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1620, "sampling_model": "Bagel", "source_image_ori": "Part2/Action/70.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1620/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/965/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Action/965/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1620/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1620/loser.png", "save_id": 965, "prompt_en": "Have this boy take a starting position for a 100-meter sprint.", "prompt_cn": "让这个男生做出百米起跑的动作。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1621, "sampling_model": "Bagel", "source_image_ori": 
"Part1/image/137.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1621/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/556/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/556/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1621/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1621/loser.png", "save_id": 556, "prompt_en": "Make the stuffed bunny on the bed twice as large.", "prompt_cn": "将床上的毛绒兔子尺寸放大为原来的两倍。\n", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1622, "sampling_model": "Bagel", "source_image_ori": "Part1/image/206.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1622/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/567/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/567/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1622/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1622/loser.png", "save_id": 567, "prompt_en": "Change the height of the chair next to the desk to 2/3 of its current height.", "prompt_cn": "将图中桌子旁的椅子高度改为当前的2/3。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1623, "sampling_model": "Bagel", "source_image_ori": "Part1/image/252.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1623/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/575/8.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/575/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1623/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1623/loser.png", "save_id": 575, "prompt_en": "Change both headphones to a premium matte dark gray color.", "prompt_cn": "将两个耳机改为高级的哑光深灰色", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1624, "sampling_model": "Bagel", "source_image_ori": "Part1/image/443.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1624/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/614/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part1/Change_color_size/614/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1624/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1624/loser.png", "save_id": 614, "prompt_en": "Shrink the central black coffee cup to half of its current height.", "prompt_cn": "将中间黑色咖啡杯缩小到现在的一半高度。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1625, "sampling_model": "Bagel", "source_image_ori": "Part1/image/201.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1625/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/300/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/300/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1625/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1625/loser.png", "save_id": 300, 
"prompt_en": "Remove the 'Stop' sign and the pink helmet, and add a skateboard on the road.", "prompt_cn": "移除‘Stop’的标志和粉色的头盔,在路上加入一个滑板。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1626, "sampling_model": "Bagel", "source_image_ori": "Part1/image/307.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1626/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/310/11.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/310/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1626/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1626/loser.png", "save_id": 310, "prompt_en": "Remove the top white book on the coffee table, replace the ceramic kettle with a globe, change the sofa to light yellow, and place a sports jacket on the sofa.", "prompt_cn": "移除茶几上最上面的那本白色书,将茶几上的陶瓷水壶替换为地球仪,将沙发改为淡黄色,并在沙发上放一件运动外套。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1627, "sampling_model": "Bagel", "source_image_ori": "Part1/image/315.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1627/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/315/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/315/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1627/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1627/loser.png", "save_id": 315, "prompt_en": "Remove the two red lounge chairs and add a pink swim ring in the pool.", "prompt_cn": "移除两张红色躺椅,在游泳池中加入一个粉色的游泳圈。", "label": 
"preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1628, "sampling_model": "Bagel", "source_image_ori": "Part1/image/365.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1628/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/347/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/347/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1628/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1628/loser.png", "save_id": 347, "prompt_en": "Have the girl place both feet on the ground, display a helpless gesture and expression, and change her top to yellow.", "prompt_cn": "让这个女孩双脚放在地面上,做出无奈的动作和表情,并将上衣改为黄色。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1629, "sampling_model": "Bagel", "source_image_ori": "Part1/image/365.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1629/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/347/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/347/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1629/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1629/loser.png", "save_id": 347, "prompt_en": "Have the girl place both feet on the ground, display a helpless gesture and expression, and change her top to yellow.", "prompt_cn": "让这个女孩双脚放在地面上,做出无奈的动作和表情,并将上衣改为黄色。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { 
"IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1630, "sampling_model": "Bagel", "source_image_ori": "Part1/image/366.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1630/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/348/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/348/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1630/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1630/loser.png", "save_id": 348, "prompt_en": "Remove the cactus from the image, swap the positions of the pen and the glasses, and add a neon sign reading 'Creative Hub' in the top-left corner.", "prompt_cn": "移除图中的仙人掌,交换笔和眼镜的位置,并在左上角添加一个写着‘Creative Hub’的霓虹字标牌。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1631, "sampling_model": "Bagel", "source_image_ori": "Part1/image/449.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1631/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/372/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part6/Complex/372/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1631/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1631/loser.png", "save_id": 372, "prompt_en": "Move the silver mirror to the wall on the left side of the door, add a whiteboard above the door with the artistic text 'HOME SWEET HOME', change the door color to a bright red, and add a pink suitcase to the right of the door.", "prompt_cn": "将银色镜子移到门的左侧墙面,在门的上方添加一块白板,并在白板上写上艺术字体的‘HOME SWEET HOME’,同时将门的颜色改为鲜艳的红色,并在门的右侧添加一个粉色的旅行箱。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", 
"VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1632, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/1.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1632/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1037/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1037/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1632/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1632/loser.png", "save_id": 1037, "prompt_en": "Swap the positions of the cup and the spoon.", "prompt_cn": "交换杯子和勺子的的位置。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1633, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/17.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1633/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1053/3.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1053/8.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1633/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1633/loser.png", "save_id": 1053, "prompt_en": "Swap the toy car with the teddy bear.", "prompt_cn": "将玩具小汽车与泰迪熊互相交换位置。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1634, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/29.png", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1634/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1062/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1062/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1634/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1634/loser.png", "save_id": 1062, "prompt_en": "Swap the dumbbell and the water bottle.", "prompt_cn": "交换哑铃和水瓶的位置。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1635, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/50.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1635/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1079/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1079/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1635/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1635/loser.png", "save_id": 1079, "prompt_en": "Swap the hair colors of the two children.", "prompt_cn": "交换两个孩子的头发颜色。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1636, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/51.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1636/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1080/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1080/0.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1636/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1636/loser.png", "save_id": 1080, "prompt_en": "Swap the hairstyles of the blonde woman and the brunette woman.", "prompt_cn": "将金发女子和棕发女子的发型对调。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1637, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/67.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1637/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1095/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1095/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1637/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1637/loser.png", "save_id": 1095, "prompt_en": "Swap the states and positions of the cat and the dog.", "prompt_cn": "交换猫和狗的状态和位置。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1638, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/68.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1638/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1096/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1096/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1638/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1638/tie_2.png", "save_id": 1096, "prompt_en": "Swap the objects being eaten by the boy and the girl.", "prompt_cn": 
"交换男孩和女孩吃的物体。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1639, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/70.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1639/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1098/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1098/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1639/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1639/loser.png", "save_id": 1098, "prompt_en": "Swap the poses of the two models.", "prompt_cn": "交换两个模特的姿势。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1640, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/73.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1640/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1101/2.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1101/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1640/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1640/loser.png", "save_id": 1101, "prompt_en": "Swap the positions and actions of these two people.", "prompt_cn": "交换这两个人的位置和动作", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": 
"VQ_Parser" }, "hints": "" }, { "idx": 1641, "sampling_model": "Bagel", "source_image_ori": "Part2/Swap/75.png", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1641/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1103/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Swap/1103/7.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1641/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1641/loser.png", "save_id": 1103, "prompt_en": "Swap the poses and states of the two cats.", "prompt_cn": "交换两个猫的动作和状态。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1642, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/5.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1642/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/971/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/971/9.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1642/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1642/loser.png", "save_id": 971, "prompt_en": "Move the robot to the left.", "prompt_cn": "将机器人移动到左侧。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1643, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/29.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1643/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/992/0.png", "loser_src": 
"/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/992/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1643/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1643/tie_2.png", "save_id": 992, "prompt_en": "Move the strawberry onto the fork next to the cake.", "prompt_cn": "将草莓移到蛋糕旁边的叉子上。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1644, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/57.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1644/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1008/10.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1008/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1644/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1644/loser.png", "save_id": 1008, "prompt_en": "Move the yoga mat to the center of the room.", "prompt_cn": "把瑜伽垫移动到房间中心。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1645, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/74.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1645/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1021/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1021/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1645/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1645/tie_2.png", "save_id": 1021, "prompt_en": "Move the sunglasses down to 
cover her eyes.", "prompt_cn": "将太阳镜向下移动,使其遮住她的眼睛。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1646, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/94.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1646/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1029/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel/en/Part2/Move/1029/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1646/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1646/loser.png", "save_id": 1029, "prompt_en": "Move the blue sphere to the right of the yellow cylinder.", "prompt_cn": "将蓝色球体移动到黄色圆柱的右边。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1647, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/3.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1647/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/969/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/969/11.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1647/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1647/loser.png", "save_id": 969, "prompt_en": "Move the candies outside the jar into the box.", "prompt_cn": "将罐子外面的糖果移入盒子中。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, 
"parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1648, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/4.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1648/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/970/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/970/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1648/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1648/tie_2.png", "save_id": 970, "prompt_en": "Move the chair to the right side of the sofa.", "prompt_cn": "将椅子移动到沙发的右侧。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1649, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1649/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/973/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/973/5.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1649/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1649/loser.png", "save_id": 973, "prompt_en": "Move the coffee cup to the bottom-right corner of the image.", "prompt_cn": "将咖啡杯移动到右下角。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1650, "sampling_model": "Bagel", "source_image_ori": "Part2/Move/28.jpg", "source_image": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1650/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/991/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Move/991/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1650/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1650/loser.png", "save_id": 991, "prompt_en": "Move the pen to the space above the notebook.", "prompt_cn": "将钢笔移动到笔记本上方的空白位置。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "IC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "IC": "IC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1651, "sampling_model": "Bagel", "source_image_ori": "Part1/image/2.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1651/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/3/8.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/3/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1651/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1651/loser.png", "save_id": 3, "prompt_en": "Add a Los Angeles Lakers Kobe Bryant jersey inside the wooden frame.", "prompt_cn": "在墙上挂着的木质画框内,添加一件洛杉矶湖人队科比·布莱恩特的球衣。 ", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1652, "sampling_model": "Bagel", "source_image_ori": "Part1/image/35.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1652/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/37/4.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/37/5.png", "winner": 
"/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1652/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1652/tie_2.png", "save_id": 37, "prompt_en": "Add a Starbucks iced latte with the logo visible on the table.", "prompt_cn": "在桌子上加入一杯带标志的星巴克冰拿铁 。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1653, "sampling_model": "Bagel", "source_image_ori": "Part1/image/59.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1653/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/60/6.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/60/1.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1653/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1653/loser.png", "save_id": 60, "prompt_en": "Place a Los Angeles Lakers LeBron James jersey on the yellow lounge chair.", "prompt_cn": "在黄色躺椅上放一件湖人队勒布朗·詹姆斯的球衣。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1654, "sampling_model": "Bagel", "source_image_ori": "Part1/image/65.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1654/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/66/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/66/6.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1654/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1654/tie_2.png", "save_id": 66, "prompt_en": "Add a Santa Claus figurine to the bottom-right corner.", "prompt_cn": "在右下角加入一个圣诞老人人偶。", "label": "tie", "dimension": "VQ", 
"system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1655, "sampling_model": "Bagel", "source_image_ori": "Part1/image/381.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1655/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/97/0.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part1/ADD/97/10.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1655/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1655/tie_2.png", "save_id": 97, "prompt_en": "ADD a small, red knitted beanie with a pom-pom onto the cat's head.", "prompt_cn": "ADD 在猫的头上加一顶带有白色绒球的小型红色针织帽", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1656, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1656/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1112/5.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1112/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1656/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1656/tie_2.png", "save_id": 1112, "prompt_en": "Make the boy and girl hold hands.", "prompt_cn": "让这个男孩和这个女孩牵着手。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1657, "sampling_model": "Bagel", 
"source_image_ori": "Part2/Obj_interaction/7.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1657/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1112/1.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1112/4.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1657/tie_1.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1657/tie_2.png", "save_id": 1112, "prompt_en": "Make the boy and girl hold hands.", "prompt_cn": "让这个男孩和这个女孩牵着手。", "label": "tie", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1658, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/17.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1658/source.png", "winner_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1121/9.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1121/0.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1658/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1658/loser.png", "save_id": 1121, "prompt_en": "Have the man perform a bench press, lifting the barbell.", "prompt_cn": "让这个男人正在做卧推,举起杠铃。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" }, { "idx": 1659, "sampling_model": "Bagel", "source_image_ori": "Part2/Obj_interaction/27.jpg", "source_image": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1659/source.png", "winner_src": 
"/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1130/7.png", "loser_src": "/root/data/bxh/RM_Model_Infer/Bagel-Think/en/Part2/Object_Interaction/1130/3.png", "winner": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1659/winner.png", "loser": "/root/data/bxh/OmniBench_EVAL_RM/RM_Bench/VQ/1659/loser.png", "save_id": 1130, "prompt_en": "Make the boy brush his teeth.", "prompt_cn": "让这个男孩刷牙。", "label": "preference", "dimension": "VQ", "system_prompt_name": { "IF": "Single_image_prompt", "WA": "Single_image_prompt", "VC": "Single_image_prompt", "VQ": "Single_image_prompt" }, "parser_name": { "IF": "IF_Parser", "WA": "WA_Parser", "VC": "VC_Parser", "VQ": "VQ_Parser" }, "hints": "" } ]